
lavfi/vf_overlay: move to framesync2.

Nicolas George authored on 2017/07/18 03:46:31
Showing 2 changed files: libavfilter/Makefile and libavfilter/vf_overlay.c
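
For orientation before the hunks, a condensed sketch of the framesync2 dualinput pattern the filter adopts (illustrative only; ExampleContext and on_event are placeholder names, the actual wiring is in the vf_overlay.c hunks below):

#include "framesync2.h"

typedef struct ExampleContext {
    FFFrameSync fs;                    /* replaces FFDualInputContext */
} ExampleContext;

/* Called by framesync2 with one (main, second) frame pair per output frame. */
static int on_event(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *main_frame, *second_frame;
    int ret = ff_framesync2_dualinput_get(fs, &main_frame, &second_frame);
    if (ret < 0)
        return ret;
    /* ... blend second_frame onto main_frame ... */
    return ff_filter_frame(ctx->outputs[0], main_frame);
}

static int config_output(AVFilterLink *outlink)
{
    ExampleContext *s = outlink->src->priv;
    int ret = ff_framesync2_init_dualinput(&s->fs, outlink->src);
    if (ret < 0)
        return ret;
    s->fs.on_event = on_event;         /* vf_overlay sets this in init() */
    return ff_framesync2_configure(&s->fs);
}

/* One activate callback replaces the per-pad filter_frame/request_frame pair. */
static int activate(AVFilterContext *ctx)
{
    ExampleContext *s = ctx->priv;
    return ff_framesync2_activate(&s->fs);
}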
... ...
@@ -245,7 +245,7 @@ OBJS-$(CONFIG_OCR_FILTER)                    += vf_ocr.o
 OBJS-$(CONFIG_OCV_FILTER)                    += vf_libopencv.o
 OBJS-$(CONFIG_OPENCL)                        += deshake_opencl.o unsharp_opencl.o
 OBJS-$(CONFIG_OSCILLOSCOPE_FILTER)           += vf_datascope.o
-OBJS-$(CONFIG_OVERLAY_FILTER)                += vf_overlay.o dualinput.o framesync.o
+OBJS-$(CONFIG_OVERLAY_FILTER)                += vf_overlay.o framesync2.o
 OBJS-$(CONFIG_OWDENOISE_FILTER)              += vf_owdenoise.o
 OBJS-$(CONFIG_PAD_FILTER)                    += vf_pad.o
 OBJS-$(CONFIG_PALETTEGEN_FILTER)             += vf_palettegen.o
... ...
@@ -36,8 +36,8 @@
 #include "libavutil/opt.h"
 #include "libavutil/timestamp.h"
 #include "internal.h"
-#include "dualinput.h"
 #include "drawutils.h"
+#include "framesync2.h"
 #include "video.h"
 
 static const char *const var_names[] = {
... ...
@@ -121,7 +121,7 @@ typedef struct OverlayContext {
     int format;                 ///< OverlayFormat
     int eval_mode;              ///< EvalMode
 
-    FFDualInputContext dinput;
+    FFFrameSync fs;
 
     int main_pix_step[4];       ///< steps per pixel for each plane of the main output
     int overlay_pix_step[4];    ///< steps per pixel for each plane of the overlay
... ...
@@ -132,6 +132,8 @@ typedef struct OverlayContext {
     char *x_expr, *y_expr;
 
     int eof_action;             ///< action to take on EOF from source
+    int opt_shortest;
+    int opt_repeatlast;
 
     AVExpr *x_pexpr, *y_pexpr;
 
... ...
@@ -142,7 +144,7 @@ static av_cold void uninit(AVFilterContext *ctx)
 {
     OverlayContext *s = ctx->priv;
 
-    ff_dualinput_uninit(&s->dinput);
+    ff_framesync2_uninit(&s->fs);
     av_expr_free(s->x_pexpr); s->x_pexpr = NULL;
     av_expr_free(s->y_pexpr); s->y_pexpr = NULL;
 }
... ...
@@ -390,14 +392,20 @@ static int config_output(AVFilterLink *outlink)
     OverlayContext *s = ctx->priv;
     int ret;
 
-    if ((ret = ff_dualinput_init(ctx, &s->dinput)) < 0)
+    if ((ret = ff_framesync2_init_dualinput(&s->fs, ctx)) < 0)
         return ret;
+    if (s->opt_shortest)
+        s->fs.in[0].after = s->fs.in[1].after = EXT_STOP;
+    if (!s->opt_repeatlast) {
+        s->fs.in[1].after = EXT_NULL;
+        s->fs.in[1].sync  = 0;
+    }
 
     outlink->w = ctx->inputs[MAIN]->w;
     outlink->h = ctx->inputs[MAIN]->h;
     outlink->time_base = ctx->inputs[MAIN]->time_base;
 
-    return 0;
+    return ff_framesync2_configure(&s->fs);
 }
 
 // divide by 255 and round to nearest
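
As a reading aid (not part of the diff), the EOF handling selected by the two new options, restated with the framesync2 extension-mode semantics spelled out in comments:

if (s->opt_shortest)
    /* shortest=1: EXT_STOP ends the whole output as soon as either
     * input reaches EOF.                                            */
    s->fs.in[0].after = s->fs.in[1].after = EXT_STOP;
if (!s->opt_repeatlast) {
    /* repeatlast=0: after the overlay input ends, EXT_NULL delivers a
     * NULL secondary frame instead of repeating the last overlay
     * frame, and sync=0 stops that input from driving the output
     * timestamps.                                                    */
    s->fs.in[1].after = EXT_NULL;
    s->fs.in[1].sync  = 0;
}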
... ...
@@ -770,11 +778,19 @@ static int config_input_main(AVFilterLink *inlink)
     return 0;
 }
 
-static AVFrame *do_blend(AVFilterContext *ctx, AVFrame *mainpic,
-                         const AVFrame *second)
+static int do_blend(FFFrameSync *fs)
 {
+    AVFilterContext *ctx = fs->parent;
+    AVFrame *mainpic, *second;
     OverlayContext *s = ctx->priv;
     AVFilterLink *inlink = ctx->inputs[0];
+    int ret;
+
+    ret = ff_framesync2_dualinput_get_writable(fs, &mainpic, &second);
+    if (ret < 0)
+        return ret;
+    if (!second)
+        return ff_filter_frame(ctx->outputs[0], mainpic);
 
     if (s->eval_mode == EVAL_MODE_FRAME) {
         int64_t pos = mainpic->pkt_pos;
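
Two details of the new do_blend() restated with comments (illustrative, not part of the diff): the _writable variant of the frame fetch takes over the guarantee that the main pad's .needs_writable flag used to provide (the flag is dropped further down), and second can now be NULL when no overlay frame is available:

ret = ff_framesync2_dualinput_get_writable(fs, &mainpic, &second);
/* mainpic is guaranteed writable, so blend_image() can modify it in
 * place; this replaces .needs_writable = 1 on the "main" input pad.  */
if (ret < 0)
    return ret;
if (!second)
    /* no overlay frame available (e.g. overlay input ended with
     * repeatlast=0): forward the main frame unchanged.               */
    return ff_filter_frame(ctx->outputs[0], mainpic);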
... ...
@@ -799,39 +815,32 @@ static AVFrame *do_blend(AVFilterContext *ctx, AVFrame *mainpic,
     if (s->x < mainpic->width  && s->x + second->width  >= 0 ||
         s->y < mainpic->height && s->y + second->height >= 0)
         s->blend_image(ctx, mainpic, second, s->x, s->y);
-    return mainpic;
-}
-
-static int filter_frame(AVFilterLink *inlink, AVFrame *inpicref)
-{
-    OverlayContext *s = inlink->dst->priv;
-    av_log(inlink->dst, AV_LOG_DEBUG, "Incoming frame (time:%s) from link #%d\n", av_ts2timestr(inpicref->pts, &inlink->time_base), FF_INLINK_IDX(inlink));
-    return ff_dualinput_filter_frame(&s->dinput, inlink, inpicref);
-}
-
-static int request_frame(AVFilterLink *outlink)
-{
-    OverlayContext *s = outlink->src->priv;
-    return ff_dualinput_request_frame(&s->dinput, outlink);
+    return ff_filter_frame(ctx->outputs[0], mainpic);
 }
 
 static av_cold int init(AVFilterContext *ctx)
 {
     OverlayContext *s = ctx->priv;
 
-    if (!s->dinput.repeatlast || s->eof_action == EOF_ACTION_PASS) {
-        s->dinput.repeatlast = 0;
+    if (!s->opt_repeatlast || s->eof_action == EOF_ACTION_PASS) {
+        s->opt_repeatlast = 0;
         s->eof_action = EOF_ACTION_PASS;
     }
-    if (s->dinput.shortest || s->eof_action == EOF_ACTION_ENDALL) {
-        s->dinput.shortest = 1;
+    if (s->opt_shortest || s->eof_action == EOF_ACTION_ENDALL) {
+        s->opt_shortest = 1;
         s->eof_action = EOF_ACTION_ENDALL;
     }
 
-    s->dinput.process = do_blend;
+    s->fs.on_event = do_blend;
     return 0;
 }
 
+static int activate(AVFilterContext *ctx)
+{
+    OverlayContext *s = ctx->priv;
+    return ff_framesync2_activate(&s->fs);
+}
+
 #define OFFSET(x) offsetof(OverlayContext, x)
 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
 
... ...
@@ -847,7 +856,7 @@ static const AVOption overlay_options[] = {
     { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_FRAME}, 0, EVAL_MODE_NB-1, FLAGS, "eval" },
          { "init",  "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT},  .flags = FLAGS, .unit = "eval" },
          { "frame", "eval expressions per-frame",                  0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
-    { "shortest", "force termination when the shortest input terminates", OFFSET(dinput.shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
+    { "shortest", "force termination when the shortest input terminates", OFFSET(opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
     { "format", "set output format", OFFSET(format), AV_OPT_TYPE_INT, {.i64=OVERLAY_FORMAT_YUV420}, 0, OVERLAY_FORMAT_NB-1, FLAGS, "format" },
        { "yuv420", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV420}, .flags = FLAGS, .unit = "format" },
        { "yuv422", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV422}, .flags = FLAGS, .unit = "format" },
... ...
@@ -855,7 +864,7 @@ static const AVOption overlay_options[] = {
        { "rgb",    "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_RGB},    .flags = FLAGS, .unit = "format" },
        { "gbrp",   "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_GBRP},   .flags = FLAGS, .unit = "format" },
        { "auto",   "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_AUTO},   .flags = FLAGS, .unit = "format" },
-    { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(dinput.repeatlast), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
+    { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(opt_repeatlast), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
     { NULL }
 };
 
... ...
@@ -866,14 +875,11 @@ static const AVFilterPad avfilter_vf_overlay_inputs[] = {
         .name         = "main",
         .type         = AVMEDIA_TYPE_VIDEO,
         .config_props = config_input_main,
-        .filter_frame = filter_frame,
-        .needs_writable = 1,
     },
     {
         .name         = "overlay",
         .type         = AVMEDIA_TYPE_VIDEO,
         .config_props = config_input_overlay,
-        .filter_frame = filter_frame,
     },
     { NULL }
 };
... ...
@@ -883,7 +889,6 @@ static const AVFilterPad avfilter_vf_overlay_outputs[] = {
         .name          = "default",
         .type          = AVMEDIA_TYPE_VIDEO,
         .config_props  = config_output,
-        .request_frame = request_frame,
     },
     { NULL }
 };
... ...
@@ -896,6 +901,7 @@ AVFilter ff_vf_overlay = {
     .priv_size     = sizeof(OverlayContext),
     .priv_class    = &overlay_class,
     .query_formats = query_formats,
+    .activate      = activate,
     .process_command = process_command,
     .inputs        = avfilter_vf_overlay_inputs,
     .outputs       = avfilter_vf_overlay_outputs,