
avfilter: add signalstats filter

Signed-off-by: Mark Heath <silicontrip@gmail.com>
Signed-off-by: Dave Rice <dave@dericed.com>
Signed-off-by: Clément Bœsch <u@pkh.me>

Clément Bœsch authored on 2014/06/12 04:14:01
Showing 6 changed files
... ...
@@ -28,6 +28,7 @@ version <next>:
28 28
 - WebVTT encoder
29 29
 - showcqt multimedia filter
30 30
 - zoompan filter
31
+- signalstats filter
31 32
 
32 33
 
33 34
 version 2.2:
... ...
@@ -7532,6 +7532,172 @@ Swap the second and third planes of the input:
7532 7532
 ffmpeg -i INPUT -vf shuffleplanes=0:2:1:3 OUTPUT
7533 7533
 @end example
7534 7534
 
7535
+@section signalstats
7536
+Evaluate various visual metrics that assist in determining issues associated
7537
+with the digitization of analog video media.
7538
+
7539
+By default the filter will log these metadata values:
7540
+
7541
+@table @option
7542
+@item YMIN
7543
+Display the minimal Y value contained within the input frame. Expressed in
7544
+range of [0-255].
7545
+
7546
+@item YLOW
7547
+Display the Y value at the 10th percentile within the input frame. Expressed in
7548
+range of [0-255].
7549
+
7550
+@item YAVG
7551
+Display the average Y value within the input frame. Expressed in range of
7552
+[0-255].
7553
+
7554
+@item YHIGH
7555
+Display the Y value at the 90th percentile within the input frame. Expressed in
7556
+range of [0-255].
7557
+
7558
+@item YMAX
7559
+Display the maximum Y value contained within the input frame. Expressed in
7560
+range of [0-255].
7561
+
7562
+@item UMIN
7563
+Display the minimal U value contained within the input frame. Expressed in
7564
+range of [0-255].
7565
+
7566
+@item ULOW
7567
+Display the U value at the 10th percentile within the input frame. Expressed in
7568
+range of [0-255].
7569
+
7570
+@item UAVG
7571
+Display the average U value within the input frame. Expressed in range of
7572
+[0-255].
7573
+
7574
+@item UHIGH
7575
+Display the U value at the 90th percentile within the input frame. Expressed in
7576
+range of [0-255].
7577
+
7578
+@item UMAX
7579
+Display the maximum U value contained within the input frame. Expressed in
7580
+range of [0-255].
7581
+
7582
+@item VMIN
7583
+Display the minimal V value contained within the input frame. Expressed in
7584
+range of [0-255].
7585
+
7586
+@item VLOW
7587
+Display the V value at the 10th percentile within the input frame. Expressed in
7588
+range of [0-255].
7589
+
7590
+@item VAVG
7591
+Display the average V value within the input frame. Expressed in range of
7592
+[0-255].
7593
+
7594
+@item VHIGH
7595
+Display the V value at the 90th percentile within the input frame. Expressed in
7596
+range of [0-255].
7597
+
7598
+@item VMAX
7599
+Display the maximum V value contained within the input frame. Expressed in
7600
+range of [0-255].
7601
+
7602
+@item SATMIN
7603
+Display the minimal saturation value contained within the input frame.
7604
+Expressed in range of [0-~181.02].
7605
+
7606
+@item SATLOW
7607
+Display the saturation value at the 10th percentile within the input frame.
7608
+Expressed in range of [0-~181.02].
7609
+
7610
+@item SATAVG
7611
+Display the average saturation value within the input frame. Expressed in range
7612
+of [0-~181.02].
7613
+
7614
+@item SATHIGH
7615
+Display the saturation value at the 90th percentile within the input frame.
7616
+Expressed in range of [0-~181.02].
7617
+
7618
+@item SATMAX
7619
+Display the maximum saturation value contained within the input frame.
7620
+Expressed in range of [0-~181.02].
7621
+
7622
+@item HUEMED
7623
+Display the median value for hue within the input frame. Expressed in range of
7624
+[0-360].
7625
+
7626
+@item HUEAVG
7627
+Display the average value for hue within the input frame. Expressed in range of
7628
+[0-360].
7629
+
7630
+@item YDIF
7631
+Display the average of sample value difference between all values of the Y
7632
+plane in the current frame and corresponding values of the previous input frame.
7633
+Expressed in range of [0-255].
7634
+
7635
+@item UDIF
7636
+Display the average of sample value difference between all values of the U
7637
+plane in the current frame and corresponding values of the previous input frame.
7638
+Expressed in range of [0-255].
7639
+
7640
+@item VDIF
7641
+Display the average of sample value difference between all values of the V
7642
+plane in the current frame and corresponding values of the previous input frame.
7643
+Expressed in range of [0-255].
7644
+@end table
7645
+
7646
+The filter accepts the following options:
7647
+
7648
+@table @option
7649
+@item stat
7650
+@item out
7651
+
7652
+@option{stat} specifies an additional form of image analysis.
7653
+@option{out} outputs a video with the specified type of pixel highlighted.
7654
+
7655
+Both options accept the following values:
7656
+
7657
+@table @samp
7658
+@item tout
7659
+Identify @var{temporal outlier} pixels. A @var{temporal outlier} is a pixel
7660
+unlike the neighboring pixels of the same field. Examples of temporal outliers
7661
+include the results of video dropouts, head clogs, or tape tracking issues.
7662
+
7663
+@item vrep
7664
+Identify @var{vertical line repetition}. Vertical line repetition refers to
7665
+similar rows of pixels within a frame. In born-digital video vertical line
7666
+repetition is common, but this pattern is uncommon in video digitized from an
7667
+analog source. When it occurs in video that results from the digitization of an
7668
+analog source it can indicate concealment from a dropout compensator.
7669
+
7670
+@item brng
7671
+Identify pixels that fall outside of the legal broadcast range (for 8-bit data, luma outside of 16-235 or chroma outside of 16-240).
7672
+@end table
7673
+
7674
+@item color, c
7675
+Set the highlight color for the @option{out} option. The default color is
7676
+yellow.
7677
+@end table
7678
+
7679
+@subsection Examples
7680
+
7681
+@itemize
7682
+@item
7683
+Output data of various video metrics:
7684
+@example
7685
+ffprobe -f lavfi movie=example.mov,signalstats="stat=tout+vrep+brng" -show_frames
7686
+@end example
7687
+
7688
+@item
7689
+Output specific data about the minimum and maximum values of the Y plane per frame:
7690
+@example
7691
+ffprobe -f lavfi movie=example.mov,signalstats -show_entries frame_tags=lavfi.signalstats.YMAX,lavfi.signalstats.YMIN
7692
+@end example
7693
+
7694
+@item
7695
+Play back video while highlighting pixels that are outside of broadcast range in red:
7696
+@example
7697
+ffplay example.mov -vf signalstats="out=brng:color=red"
7698
+@end example
7699
+@end itemize
7700
+
7535 7701
 @anchor{smartblur}
7536 7702
 @section smartblur
7537 7703
 
... ...
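
The saturation range of [0-~181.02] and the hue range of [0-360] documented
above follow from the per-pixel arithmetic the filter uses (see
vf_signalstats.c later in this commit): saturation is the distance of a (U,V)
sample from the neutral chroma point (128,128), so its maximum is
hypot(128,128) = 128*sqrt(2) ~= 181.02, and hue is the angle of that offset
vector mapped onto [0,360]. The LOW/HIGH metrics are likewise the 10th and
90th percentiles read off a per-frame histogram. A minimal C sketch of the
saturation/hue arithmetic follows; it is editorial, not part of the patch, and
the helper name is illustrative only:

#include <math.h>

/* Mirrors the per-sample computation in vf_signalstats.c for 8-bit
 * chroma samples u, v in [0,255]. */
static void sat_hue(int u, int v, double *sat, double *hue)
{
    /* distance from the neutral chroma point (128,128);
     * 0 for grey, at most hypot(128,128) ~= 181.02 */
    *sat = hypot(u - 128, v - 128);
    /* angle of the (U,V) offset, shifted from [-180,180] to [0,360];
     * as in the filter, U-128 is the y argument and V-128 the x argument */
    *hue = floor((180 / M_PI) * atan2(u - 128, v - 128) + 180);
}
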
@@ -174,6 +174,7 @@ OBJS-$(CONFIG_SETSAR_FILTER)                 += vf_aspect.o
174 174
 OBJS-$(CONFIG_SETTB_FILTER)                  += settb.o
175 175
 OBJS-$(CONFIG_SHOWINFO_FILTER)               += vf_showinfo.o
176 176
 OBJS-$(CONFIG_SHUFFLEPLANES_FILTER)          += vf_shuffleplanes.o
177
+OBJS-$(CONFIG_SIGNALSTATS_FILTER)            += vf_signalstats.o
177 178
 OBJS-$(CONFIG_SMARTBLUR_FILTER)              += vf_smartblur.o
178 179
 OBJS-$(CONFIG_SPLIT_FILTER)                  += split.o
179 180
 OBJS-$(CONFIG_SPP_FILTER)                    += vf_spp.o
... ...
@@ -191,6 +191,7 @@ void avfilter_register_all(void)
191 191
     REGISTER_FILTER(SETTB,          settb,          vf);
192 192
     REGISTER_FILTER(SHOWINFO,       showinfo,       vf);
193 193
     REGISTER_FILTER(SHUFFLEPLANES,  shuffleplanes,  vf);
194
+    REGISTER_FILTER(SIGNALSTATS,    signalstats,    vf);
194 195
     REGISTER_FILTER(SMARTBLUR,      smartblur,      vf);
195 196
     REGISTER_FILTER(SPLIT,          split,          vf);
196 197
     REGISTER_FILTER(SPP,            spp,            vf);
... ...
@@ -30,7 +30,7 @@
30 30
 #include "libavutil/version.h"
31 31
 
32 32
 #define LIBAVFILTER_VERSION_MAJOR   4
33
-#define LIBAVFILTER_VERSION_MINOR   7
33
+#define LIBAVFILTER_VERSION_MINOR   8
34 34
 #define LIBAVFILTER_VERSION_MICRO 100
35 35
 
36 36
 #define LIBAVFILTER_VERSION_INT AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, \
37 37
new file mode 100644
... ...
@@ -0,0 +1,478 @@
0
+/*
1
+ * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
2
+ * Copyright (c) 2014 Clément Bœsch
3
+ * Copyright (c) 2014 Dave Rice @dericed
4
+ *
5
+ * This file is part of FFmpeg.
6
+ *
7
+ * FFmpeg is free software; you can redistribute it and/or
8
+ * modify it under the terms of the GNU Lesser General Public
9
+ * License as published by the Free Software Foundation; either
10
+ * version 2.1 of the License, or (at your option) any later version.
11
+ *
12
+ * FFmpeg is distributed in the hope that it will be useful,
13
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
14
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
+ * Lesser General Public License for more details.
16
+ *
17
+ * You should have received a copy of the GNU Lesser General Public
18
+ * License along with FFmpeg; if not, write to the Free Software
19
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20
+ */
21
+
22
+#include "libavutil/opt.h"
23
+#include "libavutil/pixdesc.h"
24
+#include "internal.h"
25
+
26
+enum FilterMode {
27
+    FILTER_NONE = -1,
28
+    FILTER_TOUT,
29
+    FILTER_VREP,
30
+    FILTER_BRNG,
31
+    FILT_NUMB
32
+};
33
+
34
+typedef struct {
35
+    const AVClass *class;
36
+    int chromah;    // height of chroma plane
37
+    int chromaw;    // width of chroma plane
38
+    int hsub;       // horizontal subsampling
39
+    int vsub;       // vertical subsampling
40
+    int fs;         // pixel count per frame
41
+    int cfs;        // pixel count per frame of chroma planes
42
+    enum FilterMode outfilter;
43
+    int filters;
44
+    AVFrame *frame_prev;
45
+    char *vrep_line;
46
+    uint8_t rgba_color[4];
47
+    int yuv_color[3];
48
+} SignalstatsContext;
49
+
50
+#define OFFSET(x) offsetof(SignalstatsContext, x)
51
+#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
52
+
53
+static const AVOption signalstats_options[] = {
54
+    {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "filters"},
55
+        {"tout", "analyze pixels for temporal outliers",                0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, "filters"},
56
+        {"vrep", "analyze video lines for vertical line repitition",    0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, "filters"},
57
+        {"brng", "analyze for pixels outside of broadcast range",       0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, "filters"},
58
+    {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, "out"},
59
+        {"tout", "highlight pixels that depict temporal outliers",              0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, "out"},
60
+        {"vrep", "highlight video lines that depict vertical line repitition",  0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, "out"},
61
+        {"brng", "highlight pixels that are outside of broadcast range",        0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, "out"},
62
+    {"c",     "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
63
+    {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
64
+    {NULL}
65
+};
66
+
67
+AVFILTER_DEFINE_CLASS(signalstats);
68
+
69
+static av_cold int init(AVFilterContext *ctx)
70
+{
71
+    uint8_t r, g, b;
72
+    SignalstatsContext *s = ctx->priv;
73
+
74
+    if (s->outfilter != FILTER_NONE)
75
+        s->filters |= 1 << s->outfilter;
76
+
77
+    r = s->rgba_color[0];
78
+    g = s->rgba_color[1];
79
+    b = s->rgba_color[2];
80
+    s->yuv_color[0] = (( 66*r + 129*g +  25*b + (1<<7)) >> 8) +  16;
81
+    s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
82
+    s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
83
+    return 0;
84
+}
85
+
86
+static av_cold void uninit(AVFilterContext *ctx)
87
+{
88
+    SignalstatsContext *s = ctx->priv;
89
+    av_frame_free(&s->frame_prev);
90
+    av_freep(&s->vrep_line);
91
+}
92
+
93
+static int query_formats(AVFilterContext *ctx)
94
+{
95
+    // TODO: add more
96
+    enum AVPixelFormat pix_fmts[] = {
97
+        AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV411P,
98
+        AV_PIX_FMT_NONE
99
+    };
100
+
101
+    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
102
+    return 0;
103
+}
104
+
105
+static int config_props(AVFilterLink *outlink)
106
+{
107
+    AVFilterContext *ctx = outlink->src;
108
+    SignalstatsContext *s = ctx->priv;
109
+    AVFilterLink *inlink = outlink->src->inputs[0];
110
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);
111
+    s->hsub = desc->log2_chroma_w;
112
+    s->vsub = desc->log2_chroma_h;
113
+
114
+    outlink->w = inlink->w;
115
+    outlink->h = inlink->h;
116
+
117
+    s->chromaw = FF_CEIL_RSHIFT(inlink->w, s->hsub);
118
+    s->chromah = FF_CEIL_RSHIFT(inlink->h, s->vsub);
119
+
120
+    s->fs = inlink->w * inlink->h;
121
+    s->cfs = s->chromaw * s->chromah;
122
+
123
+    if (s->filters & 1<<FILTER_VREP) {
124
+        s->vrep_line = av_malloc(inlink->h * sizeof(*s->vrep_line));
125
+        if (!s->vrep_line)
126
+            return AVERROR(ENOMEM);
127
+    }
128
+
129
+    return 0;
130
+}
131
+
132
+static void burn_frame(SignalstatsContext *s, AVFrame *f, int x, int y)
133
+{
134
+    const int chromax = x >> s->hsub;
135
+    const int chromay = y >> s->vsub;
136
+    f->data[0][y       * f->linesize[0] +       x] = s->yuv_color[0];
137
+    f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
138
+    f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
139
+}
140
+
141
+static int filter_brng(SignalstatsContext *s, const AVFrame *in, AVFrame *out, int y, int w, int h)
142
+{
143
+    int x, score = 0;
144
+    const int yc = y >> s->vsub;
145
+    const uint8_t *pluma    = &in->data[0][y  * in->linesize[0]];
146
+    const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
147
+    const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];
148
+
149
+    for (x = 0; x < w; x++) {
150
+        const int xc = x >> s->hsub;
151
+        const int luma    = pluma[x];
152
+        const int chromau = pchromau[xc];
153
+        const int chromav = pchromav[xc];
154
+        const int filt = luma    < 16 || luma    > 235 ||
155
+                         chromau < 16 || chromau > 240 ||
156
+                         chromav < 16 || chromav > 240;
157
+        score += filt;
158
+        if (out && filt)
159
+            burn_frame(s, out, x, y);
160
+    }
161
+    return score;
162
+}
163
+
164
+static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
165
+{
166
+    return ((abs(x - y) + abs(z - y)) / 2) - abs(z - x) > 4; // make 4 configurable?
167
+}
168
+
169
+static int filter_tout(SignalstatsContext *s, const AVFrame *in, AVFrame *out, int y, int w, int h)
170
+{
171
+    const uint8_t *p = in->data[0];
172
+    int lw = in->linesize[0];
173
+    int x, score = 0, filt;
174
+
175
+    if (y - 1 < 0 || y + 1 >= h)
176
+        return 0;
177
+
178
+    // detect two pixels above and below (to eliminate interlace artefacts)
179
+    // should check that the video format is in fact interlaced.
180
+
181
+#define FILTER(i, j) \
182
+filter_tout_outlier(p[(y-j) * lw + x + i], \
183
+                    p[    y * lw + x + i], \
184
+                    p[(y+j) * lw + x + i])
185
+
186
+#define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))
187
+
188
+    if (y - 2 >= 0 && y + 2 < h) {
189
+        for (x = 1; x < w - 1; x++) {
190
+            filt = FILTER3(2) && FILTER3(1);
191
+            score += filt;
192
+            if (filt && out)
193
+                burn_frame(s, out, x, y);
194
+        }
195
+    } else {
196
+        for (x = 1; x < w - 1; x++) {
197
+            filt = FILTER3(1);
198
+            score += filt;
199
+            if (filt && out)
200
+                burn_frame(s, out, x, y);
201
+        }
202
+    }
203
+    return score;
204
+}
205
+
206
+#define VREP_START 4
207
+
208
+static void filter_init_vrep(SignalstatsContext *s, const AVFrame *p, int w, int h)
209
+{
210
+    int i, y;
211
+    int lw = p->linesize[0];
212
+
213
+    for (y = VREP_START; y < h; y++) {
214
+        int totdiff = 0;
215
+        int y2lw = (y - VREP_START) * lw;
216
+        int ylw = y * lw;
217
+
218
+        for (i = 0; i < w; i++)
219
+            totdiff += abs(p->data[0][y2lw + i] - p->data[0][ylw + i]);
220
+
221
+        /* this value should be definable */
222
+        s->vrep_line[y] = totdiff < w;
223
+    }
224
+}
225
+
226
+static int filter_vrep(SignalstatsContext *s, const AVFrame *in, AVFrame *out, int y, int w, int h)
227
+{
228
+    int x, score = 0;
229
+
230
+    if (y < VREP_START)
231
+        return 0;
232
+
233
+    for (x = 0; x < w; x++) {
234
+        if (s->vrep_line[y]) {
235
+            score++;
236
+            if (out)
237
+                burn_frame(s, out, x, y);
238
+        }
239
+    }
240
+    return score;
241
+}
242
+
243
+static const struct {
244
+    const char *name;
245
+    void (*init)(SignalstatsContext *s, const AVFrame *p, int w, int h);
246
+    int (*process)(SignalstatsContext *s, const AVFrame *in, AVFrame *out, int y, int w, int h);
247
+} filters_def[] = {
248
+    {"TOUT", NULL,              filter_tout},
249
+    {"VREP", filter_init_vrep,  filter_vrep},
250
+    {"BRNG", NULL,              filter_brng},
251
+    {NULL}
252
+};
253
+
254
+#define DEPTH 256
255
+
256
+static int filter_frame(AVFilterLink *link, AVFrame *in)
257
+{
258
+    SignalstatsContext *s = link->dst->priv;
259
+    AVFilterLink *outlink = link->dst->outputs[0];
260
+    AVFrame *out = in;
261
+    int i, j;
262
+    int  w = 0,  cw = 0, // in
263
+        pw = 0, cpw = 0; // prev
264
+    int yuv, yuvu, yuvv;
265
+    int fil;
266
+    char metabuf[128];
267
+    unsigned int histy[DEPTH] = {0},
268
+                 histu[DEPTH] = {0},
269
+                 histv[DEPTH] = {0},
270
+                 histhue[360] = {0},
271
+                 histsat[DEPTH] = {0}; // limited to 8 bit data.
272
+    int miny  = -1, minu  = -1, minv  = -1;
273
+    int maxy  = -1, maxu  = -1, maxv  = -1;
274
+    int lowy  = -1, lowu  = -1, lowv  = -1;
275
+    int highy = -1, highu = -1, highv = -1;
276
+    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
277
+    int lowp, highp, clowp, chighp;
278
+    int accy, accu, accv;
279
+    int accsat, acchue = 0;
280
+    int medhue, maxhue;
281
+    int toty = 0, totu = 0, totv = 0, totsat=0;
282
+    int tothue = 0;
283
+    int dify = 0, difu = 0, difv = 0;
284
+
285
+    int filtot[FILT_NUMB] = {0};
286
+    AVFrame *prev;
287
+
288
+    if (!s->frame_prev)
289
+        s->frame_prev = av_frame_clone(in);
290
+
291
+    prev = s->frame_prev;
292
+
293
+    if (s->outfilter != FILTER_NONE)
294
+        out = av_frame_clone(in);
295
+
296
+    for (fil = 0; fil < FILT_NUMB; fil ++)
297
+        if ((s->filters & 1<<fil) && filters_def[fil].init)
298
+            filters_def[fil].init(s, in, link->w, link->h);
299
+
300
+    // Calculate luma histogram and difference with previous frame or field.
301
+    for (j = 0; j < link->h; j++) {
302
+        for (i = 0; i < link->w; i++) {
303
+            yuv = in->data[0][w + i];
304
+            histy[yuv]++;
305
+            dify += abs(in->data[0][w + i] - prev->data[0][pw + i]);
306
+        }
307
+        w  += in->linesize[0];
308
+        pw += prev->linesize[0];
309
+    }
310
+
311
+    // Calculate chroma histogram and difference with previous frame or field.
312
+    for (j = 0; j < s->chromah; j++) {
313
+        for (i = 0; i < s->chromaw; i++) {
314
+            int sat, hue;
315
+
316
+            yuvu = in->data[1][cw+i];
317
+            yuvv = in->data[2][cw+i];
318
+            histu[yuvu]++;
319
+            difu += abs(in->data[1][cw+i] - prev->data[1][cpw+i]);
320
+            histv[yuvv]++;
321
+            difv += abs(in->data[2][cw+i] - prev->data[2][cpw+i]);
322
+
323
+            // int or round?
324
+            sat = hypot(yuvu - 128, yuvv - 128);
325
+            histsat[sat]++;
326
+            hue = floor((180 / M_PI) * atan2f(yuvu-128, yuvv-128) + 180);
327
+            histhue[hue]++;
328
+        }
329
+        cw  += in->linesize[1];
330
+        cpw += prev->linesize[1];
331
+    }
332
+
333
+    for (j = 0; j < link->h; j++) {
334
+        for (fil = 0; fil < FILT_NUMB; fil ++) {
335
+            if (s->filters & 1<<fil) {
336
+                AVFrame *dbg = out != in && s->outfilter == fil ? out : NULL;
337
+                filtot[fil] += filters_def[fil].process(s, in, dbg, j, link->w, link->h);
338
+            }
339
+        }
340
+    }
341
+
342
+    // find low / high based on histogram percentile
343
+    // these only need to be calculated once.
344
+
345
+    lowp   = lrint(s->fs  * 10 / 100.);
346
+    highp  = lrint(s->fs  * 90 / 100.);
347
+    clowp  = lrint(s->cfs * 10 / 100.);
348
+    chighp = lrint(s->cfs * 90 / 100.);
349
+
350
+    accy = accu = accv = accsat = 0;
351
+    for (fil = 0; fil < DEPTH; fil++) {
352
+        if (miny   < 0 && histy[fil])   miny = fil;
353
+        if (minu   < 0 && histu[fil])   minu = fil;
354
+        if (minv   < 0 && histv[fil])   minv = fil;
355
+        if (minsat < 0 && histsat[fil]) minsat = fil;
356
+
357
+        if (histy[fil])   maxy   = fil;
358
+        if (histu[fil])   maxu   = fil;
359
+        if (histv[fil])   maxv   = fil;
360
+        if (histsat[fil]) maxsat = fil;
361
+
362
+        toty   += histy[fil]   * fil;
363
+        totu   += histu[fil]   * fil;
364
+        totv   += histv[fil]   * fil;
365
+        totsat += histsat[fil] * fil;
366
+
367
+        accy   += histy[fil];
368
+        accu   += histu[fil];
369
+        accv   += histv[fil];
370
+        accsat += histsat[fil];
371
+
372
+        if (lowy   == -1 && accy   >=  lowp) lowy   = fil;
373
+        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
374
+        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
375
+        if (lowsat == -1 && accsat >= clowp) lowsat = fil;
376
+
377
+        if (highy   == -1 && accy   >=  highp) highy   = fil;
378
+        if (highu   == -1 && accu   >= chighp) highu   = fil;
379
+        if (highv   == -1 && accv   >= chighp) highv   = fil;
380
+        if (highsat == -1 && accsat >= chighp) highsat = fil;
381
+    }
382
+
383
+    maxhue = histhue[0];
384
+    medhue = -1;
385
+    for (fil = 0; fil < 360; fil++) {
386
+        tothue += histhue[fil] * fil;
387
+        acchue += histhue[fil];
388
+
389
+        if (medhue == -1 && acchue > s->cfs / 2)
390
+            medhue = fil;
391
+        if (histhue[fil] > maxhue) {
392
+            maxhue = histhue[fil];
393
+        }
394
+    }
395
+
396
+    av_frame_free(&s->frame_prev);
397
+    s->frame_prev = av_frame_clone(in);
398
+
399
+#define SET_META(key, fmt, val) do {                                \
400
+    snprintf(metabuf, sizeof(metabuf), fmt, val);                   \
401
+    av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0);   \
402
+} while (0)
403
+
404
+    SET_META("YMIN",    "%d", miny);
405
+    SET_META("YLOW",    "%d", lowy);
406
+    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
407
+    SET_META("YHIGH",   "%d", highy);
408
+    SET_META("YMAX",    "%d", maxy);
409
+
410
+    SET_META("UMIN",    "%d", minu);
411
+    SET_META("ULOW",    "%d", lowu);
412
+    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
413
+    SET_META("UHIGH",   "%d", highu);
414
+    SET_META("UMAX",    "%d", maxu);
415
+
416
+    SET_META("VMIN",    "%d", minv);
417
+    SET_META("VLOW",    "%d", lowv);
418
+    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
419
+    SET_META("VHIGH",   "%d", highv);
420
+    SET_META("VMAX",    "%d", maxv);
421
+
422
+    SET_META("SATMIN",  "%d", minsat);
423
+    SET_META("SATLOW",  "%d", lowsat);
424
+    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
425
+    SET_META("SATHIGH", "%d", highsat);
426
+    SET_META("SATMAX",  "%d", maxsat);
427
+
428
+    SET_META("HUEMED",  "%d", medhue);
429
+    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);
430
+
431
+    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
432
+    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
433
+    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);
434
+
435
+    for (fil = 0; fil < FILT_NUMB; fil ++) {
436
+        if (s->filters & 1<<fil) {
437
+            char metaname[128];
438
+            snprintf(metabuf,  sizeof(metabuf),  "%g", 1.0 * filtot[fil] / s->fs);
439
+            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
440
+            av_dict_set(&out->metadata, metaname, metabuf, 0);
441
+        }
442
+    }
443
+
444
+    if (in != out)
445
+        av_frame_free(&in);
446
+    return ff_filter_frame(outlink, out);
447
+}
448
+
449
+static const AVFilterPad signalstats_inputs[] = {
450
+    {
451
+        .name           = "default",
452
+        .type           = AVMEDIA_TYPE_VIDEO,
453
+        .filter_frame   = filter_frame,
454
+    },
455
+    { NULL }
456
+};
457
+
458
+static const AVFilterPad signalstats_outputs[] = {
459
+    {
460
+        .name           = "default",
461
+        .config_props   = config_props,
462
+        .type           = AVMEDIA_TYPE_VIDEO,
463
+    },
464
+    { NULL }
465
+};
466
+
467
+AVFilter ff_vf_signalstats = {
468
+    .name          = "signalstats",
469
+    .description   = "Generate statistics from video analysis.",
470
+    .init          = init,
471
+    .uninit        = uninit,
472
+    .query_formats = query_formats,
473
+    .priv_size     = sizeof(SignalstatsContext),
474
+    .inputs        = signalstats_inputs,
475
+    .outputs       = signalstats_outputs,
476
+    .priv_class    = &signalstats_class,
477
+};
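
All of the per-frame statistics are exported as frame metadata under keys of
the form lavfi.signalstats.<NAME> (see the SET_META macro above), which is
what the ffprobe examples in the documentation read back. Below is a minimal,
hypothetical sketch of how an application holding an AVFrame that has passed
through the filter could read one of these values; the helper name is
illustrative and error handling is omitted:

#include <stdio.h>
#include "libavutil/dict.h"
#include "libavutil/frame.h"

/* Print the average luma that signalstats attached to a filtered frame. */
static void print_yavg(const AVFrame *frame)
{
    const AVDictionaryEntry *e =
        av_dict_get(frame->metadata, "lavfi.signalstats.YAVG", NULL, 0);
    if (e)
        printf("YAVG=%s\n", e->value);
}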