libavfilter/vf_fps.c
54c5dd89
 /*
cfb398bf
  * Copyright 2007 Bobby Bingham
  * Copyright 2012 Robert Nagy <ronag89 gmail com>
  * Copyright 2012 Anton Khirnov <anton khirnov net>
e4edc567
  * Copyright 2018 Calvin Walton <calvin.walton@kepstin.ca>
cfb398bf
  *
c836f28f
  * This file is part of FFmpeg.
54c5dd89
  *
c836f28f
  * FFmpeg is free software; you can redistribute it and/or
54c5dd89
  * modify it under the terms of the GNU Lesser General Public
  * License as published by the Free Software Foundation; either
  * version 2.1 of the License, or (at your option) any later version.
  *
c836f28f
  * FFmpeg is distributed in the hope that it will be useful,
54c5dd89
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  * Lesser General Public License for more details.
  *
  * You should have received a copy of the GNU Lesser General Public
c836f28f
  * License along with FFmpeg; if not, write to the Free Software
54c5dd89
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  */
 
 /**
  * @file
  * a filter enforcing given constant framerate
  */
 
cb8f70c9
 #include <float.h>
8f8bc923
 #include <stdint.h>
cb8f70c9
 
e4edc567
 #include "libavutil/avassert.h"
54c5dd89
 #include "libavutil/mathematics.h"
 #include "libavutil/opt.h"
 #include "avfilter.h"
e4edc567
 #include "filters.h"
803391f7
 #include "internal.h"
54c5dd89
 
62bdec80
/* How the timestamp of the final buffered frame is treated at EOF. */
enum EOFAction {
    EOF_ACTION_ROUND,   ///< round the EOF timestamp like any other frame
    EOF_ACTION_PASS,    ///< round EOF up, so the last frame is always output
    EOF_ACTION_NB       ///< number of actions (sentinel, not a valid value)
};

typedef struct FPSContext {
    const AVClass *class;

    double start_time;      ///< pts, in seconds, of the expected first frame
                            ///< (DBL_MAX when the option is unset)

    AVRational framerate;   ///< target framerate
    int rounding;           ///< AVRounding method for timestamps
    int eof_action;         ///< action performed for last frame in FIFO

    /* Set during outlink configuration */
    int64_t  in_pts_off;    ///< input frame pts offset for start_time handling
    int64_t  out_pts_off;   ///< output frame pts offset for start_time handling

    /* Runtime state */
    int      status;        ///< buffered input status
    int64_t  status_pts;    ///< buffered input status timestamp
                            ///< (in outlink time base; AV_NOPTS_VALUE until EOF seen)

    AVFrame *frames[2];     ///< buffered frames
    int      frames_count;  ///< number of buffered frames

    int64_t  next_pts;      ///< pts of the next frame to output
                            ///< (in outlink time base; AV_NOPTS_VALUE until known)

    /* statistics */
    int cur_frame_out;         ///< number of times current frame has been output
    int frames_in;             ///< number of frames on input
    int frames_out;            ///< number of frames on output
    int dup;                   ///< number of frames duplicated
    int drop;                  ///< number of framed dropped
} FPSContext;
 
#define OFFSET(x) offsetof(FPSContext, x)
#define V AV_OPT_FLAG_VIDEO_PARAM
#define F AV_OPT_FLAG_FILTERING_PARAM
/* User-visible options; "round" and "eof_action" named constants follow
 * their parent option and share its unit string. */
static const AVOption fps_options[] = {
    { "fps", "A string describing desired output framerate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, { .str = "25" }, 0, INT_MAX, V|F },
    { "start_time", "Assume the first PTS should be this value.", OFFSET(start_time), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX}, -DBL_MAX, DBL_MAX, V|F },
    { "round", "set rounding method for timestamps", OFFSET(rounding), AV_OPT_TYPE_INT, { .i64 = AV_ROUND_NEAR_INF }, 0, 5, V|F, "round" },
        { "zero", "round towards 0",                 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_ZERO     }, 0, 0, V|F, "round" },
        { "inf",  "round away from 0",               0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_INF      }, 0, 0, V|F, "round" },
        { "down", "round towards -infty",            0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_DOWN     }, 0, 0, V|F, "round" },
        { "up",   "round towards +infty",            0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_UP       }, 0, 0, V|F, "round" },
        { "near", "round to nearest",                0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_NEAR_INF }, 0, 0, V|F, "round" },
    { "eof_action", "action performed for last frame", OFFSET(eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_ROUND }, 0, EOF_ACTION_NB-1, V|F, "eof_action" },
        { "round", "round similar to other frames",  0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ROUND }, 0, 0, V|F, "eof_action" },
        { "pass",  "pass through last frame",        0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS  }, 0, 0, V|F, "eof_action" },
    { NULL }
};
 
c17808ce
 AVFILTER_DEFINE_CLASS(fps);
54c5dd89
 
d69a4177
 static av_cold int init(AVFilterContext *ctx)
54c5dd89
 {
     FPSContext *s = ctx->priv;
 
e4edc567
     s->status_pts   = AV_NOPTS_VALUE;
     s->next_pts     = AV_NOPTS_VALUE;
7727be79
 
54c5dd89
     av_log(ctx, AV_LOG_VERBOSE, "fps=%d/%d\n", s->framerate.num, s->framerate.den);
     return 0;
 }
 
e4edc567
 /* Remove the first frame from the buffer, returning it */
 static AVFrame *shift_frame(AVFilterContext *ctx, FPSContext *s)
54c5dd89
 {
e4edc567
     AVFrame *frame;
 
     /* Must only be called when there are frames in the buffer */
     av_assert1(s->frames_count > 0);
 
     frame = s->frames[0];
     s->frames[0] = s->frames[1];
     s->frames[1] = NULL;
     s->frames_count--;
 
     /* Update statistics counters */
     s->frames_out += s->cur_frame_out;
     if (s->cur_frame_out > 1) {
         av_log(ctx, AV_LOG_DEBUG, "Duplicated frame with pts %"PRId64" %d times\n",
                frame->pts, s->cur_frame_out - 1);
         s->dup += s->cur_frame_out - 1;
     } else if (s->cur_frame_out == 0) {
         av_log(ctx, AV_LOG_DEBUG, "Dropping frame with pts %"PRId64"\n",
                frame->pts);
         s->drop++;
54c5dd89
     }
e4edc567
     s->cur_frame_out = 0;
 
     return frame;
54c5dd89
 }
 
 static av_cold void uninit(AVFilterContext *ctx)
 {
     FPSContext *s = ctx->priv;
e4edc567
 
     AVFrame *frame;
 
     while (s->frames_count > 0) {
         frame = shift_frame(ctx, s);
         av_frame_free(&frame);
54c5dd89
     }
 
     av_log(ctx, AV_LOG_VERBOSE, "%d frames in, %d frames out; %d frames dropped, "
            "%d frames duplicated.\n", s->frames_in, s->frames_out, s->drop, s->dup);
 }
 
2b2c8b22
/* Configure the output link: the output ticks exactly once per frame at the
 * requested rate.  If the start_time option is set (it defaults to DBL_MAX,
 * meaning unset), precompute the in/out pts offsets used by read_frame(). */
static int config_props(AVFilterLink* outlink)
{
    AVFilterContext *ctx    = outlink->src;
    AVFilterLink    *inlink = ctx->inputs[0];
    FPSContext      *s      = ctx->priv;

    outlink->time_base  = av_inv_q(s->framerate);
    outlink->frame_rate = s->framerate;

    /* Calculate the input and output pts offsets for start_time */
    if (s->start_time != DBL_MAX && s->start_time != AV_NOPTS_VALUE) {
        double first_pts = s->start_time * AV_TIME_BASE;
        /* Reject start times that cannot be represented as an int64_t pts */
        if (first_pts < INT64_MIN || first_pts > INT64_MAX) {
            av_log(ctx, AV_LOG_ERROR, "Start time %f cannot be represented in internal time base\n",
                   s->start_time);
            return AVERROR(EINVAL);
        }
        /* Use the same user-selected rounding on both links so the two
         * offsets stay consistent with each other */
        s->in_pts_off  = av_rescale_q_rnd(first_pts, AV_TIME_BASE_Q, inlink->time_base,
                                          s->rounding | AV_ROUND_PASS_MINMAX);
        s->out_pts_off = av_rescale_q_rnd(first_pts, AV_TIME_BASE_Q, outlink->time_base,
                                          s->rounding | AV_ROUND_PASS_MINMAX);
        s->next_pts = s->out_pts_off;
        av_log(ctx, AV_LOG_VERBOSE, "Set first pts to (in:%"PRId64" out:%"PRId64") from start time %f\n",
               s->in_pts_off, s->out_pts_off, s->start_time);
    }

    return 0;
}
 
e4edc567
 /* Read a frame from the input and save it in the buffer */
 static int read_frame(AVFilterContext *ctx, FPSContext *s, AVFilterLink *inlink, AVFilterLink *outlink)
54c5dd89
 {
e4edc567
     AVFrame *frame;
54c5dd89
     int ret;
e4edc567
     int64_t in_pts;
54c5dd89
 
e4edc567
     /* Must only be called when we have buffer room available */
     av_assert1(s->frames_count < 2);
 
     ret = ff_inlink_consume_frame(inlink, &frame);
     /* Caller must have run ff_inlink_check_available_frame first */
     av_assert1(ret);
     if (ret < 0)
54c5dd89
         return ret;
 
e4edc567
     /* Convert frame pts to output timebase.
      * The dance with offsets is required to match the rounding behaviour of the
      * previous version of the fps filter when using the start_time option. */
     in_pts = frame->pts;
     frame->pts = s->out_pts_off + av_rescale_q_rnd(in_pts - s->in_pts_off,
                                                    inlink->time_base, outlink->time_base,
                                                    s->rounding | AV_ROUND_PASS_MINMAX);
 
     av_log(ctx, AV_LOG_DEBUG, "Read frame with in pts %"PRId64", out pts %"PRId64"\n",
            in_pts, frame->pts);
 
     s->frames[s->frames_count++] = frame;
     s->frames_in++;
 
     return 1;
54c5dd89
 }
 
e4edc567
/* Write a frame to the output.
 *
 * Performs exactly one step per call: either drops the oldest buffered
 * frame, discards an un-timestamped leading frame, or sends a clone of the
 * oldest frame downstream stamped with the next output pts.  Sets *again to
 * 1 whenever another call could make further progress.  Returns 0, the
 * result of ff_filter_frame(), or AVERROR(ENOMEM). */
static int write_frame(AVFilterContext *ctx, FPSContext *s, AVFilterLink *outlink, int *again)
{
    AVFrame *frame;

    /* Caller guarantees: either the buffer is full, or EOF was seen and one
     * frame remains. */
    av_assert1(s->frames_count == 2 || (s->status && s->frames_count == 1));

    /* We haven't yet determined the pts of the first frame */
    if (s->next_pts == AV_NOPTS_VALUE) {
        if (s->frames[0]->pts != AV_NOPTS_VALUE) {
            s->next_pts = s->frames[0]->pts;
            av_log(ctx, AV_LOG_VERBOSE, "Set first pts to %"PRId64"\n", s->next_pts);
        } else {
            av_log(ctx, AV_LOG_WARNING, "Discarding initial frame(s) with no "
                   "timestamp.\n");
            frame = shift_frame(ctx, s);
            av_frame_free(&frame);
            *again = 1;
            return 0;
        }
    }

    /* There are two conditions where we want to drop a frame:
     * - If we have two buffered frames and the second frame is acceptable
     *   as the next output frame, then drop the first buffered frame.
     * - If we have status (EOF) set, drop frames when we hit the
     *   status timestamp. */
    if ((s->frames_count == 2 && s->frames[1]->pts <= s->next_pts) ||
        (s->status            && s->status_pts     <= s->next_pts)) {

        frame = shift_frame(ctx, s);
        av_frame_free(&frame);
        *again = 1;
        return 0;

    /* Output a copy of the first buffered frame */
    } else {
        frame = av_frame_clone(s->frames[0]);
        if (!frame)
            return AVERROR(ENOMEM);
        // Make sure Closed Captions will not be duplicated
        // (strip CC side data from the buffered original, so only this
        // first clone carries it)
        av_frame_remove_side_data(s->frames[0], AV_FRAME_DATA_A53_CC);
        frame->pts = s->next_pts++;

        av_log(ctx, AV_LOG_DEBUG, "Writing frame with pts %"PRId64" to pts %"PRId64"\n",
               s->frames[0]->pts, frame->pts);
        s->cur_frame_out++;
        *again = 1;
        return ff_filter_frame(outlink, frame);
    }
}
54c5dd89
 
e4edc567
 /* Convert status_pts to outlink timebase */
 static void update_eof_pts(AVFilterContext *ctx, FPSContext *s, AVFilterLink *inlink, AVFilterLink *outlink, int64_t status_pts)
 {
     int eof_rounding = (s->eof_action == EOF_ACTION_PASS) ? AV_ROUND_UP : s->rounding;
     s->status_pts = av_rescale_q_rnd(status_pts, inlink->time_base, outlink->time_base,
                                      eof_rounding | AV_ROUND_PASS_MINMAX);
54c5dd89
 
e4edc567
     av_log(ctx, AV_LOG_DEBUG, "EOF is at pts %"PRId64"\n", s->status_pts);
 }
54c5dd89
 
e4edc567
/* Filter scheduling entry point.
 *
 * Per invocation: fill the two-frame buffer from the input, then either
 * produce one output step via write_frame(), forward EOF once the buffer is
 * drained, or request more input.  Note that FF_FILTER_FORWARD_WANTED may
 * return from this function early when the output wants a frame. */
static int activate(AVFilterContext *ctx)
{
    FPSContext   *s       = ctx->priv;
    AVFilterLink *inlink  = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];

    int ret;
    int again = 0;
    int64_t status_pts;

    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);

    /* No buffered status: normal operation */
    if (!s->status) {

        /* Read available input frames if we have room */
        while (s->frames_count < 2 && ff_inlink_check_available_frame(inlink)) {
            ret = read_frame(ctx, s, inlink, outlink);
            if (ret < 0)
                return ret;
        }

        /* We do not yet have enough frames to produce output */
        if (s->frames_count < 2) {
            /* Check if we've hit EOF (or otherwise that an error status is set) */
            ret = ff_inlink_acknowledge_status(inlink, &s->status, &status_pts);
            if (ret > 0)
                update_eof_pts(ctx, s, inlink, outlink, status_pts);

            if (!ret) {
                /* If someone wants us to output, we'd better ask for more input */
                FF_FILTER_FORWARD_WANTED(outlink, inlink);
                return 0;
            }
        }
    }

    /* Buffered frames are available, so generate an output frame */
    if (s->frames_count > 0) {
        ret = write_frame(ctx, s, outlink, &again);
        /* Couldn't generate a frame, so schedule us to perform another step */
        if (again)
            ff_filter_set_ready(ctx, 100);
        return ret;
    }

    /* No frames left, so forward the status */
    if (s->status && s->frames_count == 0) {
        ff_outlink_set_status(outlink, s->status, s->next_pts);
        return 0;
    }

    return FFERROR_NOT_READY;
}
 
568c70e7
static const AVFilterPad avfilter_vf_fps_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

static const AVFilterPad avfilter_vf_fps_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_props,
    },
    { NULL }
};

/* Filter definition; uses the activate() callback rather than per-pad
 * filter_frame/request_frame. */
AVFilter ff_vf_fps = {
    .name        = "fps",
    .description = NULL_IF_CONFIG_SMALL("Force constant framerate."),
    .init        = init,
    .uninit      = uninit,
    .priv_size   = sizeof(FPSContext),
    .priv_class  = &fps_class,
    .activate    = activate,
    .inputs      = avfilter_vf_fps_inputs,
    .outputs     = avfilter_vf_fps_outputs,
};