libavfilter/af_astreamsync.c
 /*
  * Copyright (c) 2011 Nicolas George <nicolas.george@normalesup.org>
  *
  * This file is part of FFmpeg.
  *
  * FFmpeg is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Lesser General Public
  * License as published by the Free Software Foundation; either
  * version 2.1 of the License, or (at your option) any later version.
  *
  * FFmpeg is distributed in the hope that it will be useful,
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  * GNU Lesser General Public License for more details.
  *
  * You should have received a copy of the GNU Lesser General Public
  * License along with FFmpeg; if not, write to the Free Software
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  */
 
 /**
  * @file
  * Stream (de)synchronization filter
  */
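
 /*
  * Illustrative use (the stream labels [a], [b], [a2], [b2] are hypothetical):
  *   [a] [b] astreamsync=expr=t1-t2 [a2] [b2]
  * With the default expression "t1-t2", the next frame is taken from the
  * stream whose output is behind in time, keeping the two outputs roughly
  * interleaved by timestamp.
  */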
 
 #include "libavutil/eval.h"
 #include "libavutil/opt.h"
 #include "avfilter.h"
 #include "audio.h"
 #include "internal.h"
 
 #define QUEUE_SIZE 16
 
 /* Variables usable in the selection expression, for each stream N (1 or 2):
    bN: number of buffers sent so far, sN: number of samples sent so far,
    tN: time (in seconds) of the end of the last buffer sent. */
 static const char * const var_names[] = {
     "b1", "b2",
     "s1", "s2",
     "t1", "t2",
     NULL
 };
 
 enum var_name {
     VAR_B1, VAR_B2,
     VAR_S1, VAR_S2,
     VAR_T1, VAR_T2,
     VAR_NB
 };
 
 typedef struct {
     const AVClass *class;
     AVExpr *expr;
     char *expr_str;
     double var_values[VAR_NB];
     struct buf_queue {
         AVFrame *buf[QUEUE_SIZE];
         unsigned tail, nb;
         /* buf[tail] is the oldest,
            buf[(tail + nb) % QUEUE_SIZE] is where the next is added */
     } queue[2];
     int req[2];    /* pending frame requests on each output */
     int next_out;  /* index of the output to feed next */
     int eof; /* bitmask, one bit for each stream */
 } AStreamSyncContext;
 
 #define OFFSET(x) offsetof(AStreamSyncContext, x)
 #define FLAGS AV_OPT_FLAG_AUDIO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
 static const AVOption astreamsync_options[] = {
     { "expr", "set stream selection expression", OFFSET(expr_str), AV_OPT_TYPE_STRING, { .str = "t1-t2" }, .flags = FLAGS },
     { "e",    "set stream selection expression", OFFSET(expr_str), AV_OPT_TYPE_STRING, { .str = "t1-t2" }, .flags = FLAGS },
     { NULL }
 };
 
 AVFILTER_DEFINE_CLASS(astreamsync);
 
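 /* Parse the stream selection expression and evaluate it repeatedly to
    exercise its pseudo-random number generator. */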
 static av_cold int init(AVFilterContext *ctx)
 {
     AStreamSyncContext *as = ctx->priv;
     int r, i;
 
     r = av_expr_parse(&as->expr, as->expr_str, var_names,
                       NULL, NULL, NULL, NULL, 0, ctx);
     if (r < 0) {
         av_log(ctx, AV_LOG_ERROR, "Error in expression \"%s\"\n", as->expr_str);
         return r;
     }
     for (i = 0; i < 42; i++)
         av_expr_eval(as->expr, as->var_values, NULL); /* exercise prng */
     return 0;
 }
 
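 /* Tie each output's sample formats and channel layouts to those of the
    corresponding input so both links negotiate identically; any sample rate
    is accepted. */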
 static int query_formats(AVFilterContext *ctx)
 {
     int i;
     AVFilterFormats *formats, *rates;
     AVFilterChannelLayouts *layouts;
 
     for (i = 0; i < 2; i++) {
         formats = ctx->inputs[i]->in_formats;
         ff_formats_ref(formats, &ctx->inputs[i]->out_formats);
         ff_formats_ref(formats, &ctx->outputs[i]->in_formats);
         rates = ff_all_samplerates();
         ff_formats_ref(rates, &ctx->inputs[i]->out_samplerates);
         ff_formats_ref(rates, &ctx->outputs[i]->in_samplerates);
         layouts = ctx->inputs[i]->in_channel_layouts;
         ff_channel_layouts_ref(layouts, &ctx->inputs[i]->out_channel_layouts);
         ff_channel_layouts_ref(layouts, &ctx->outputs[i]->in_channel_layouts);
     }
     return 0;
 }
 
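 /* Each output inherits the sample rate and time base of the matching input. */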
 static int config_output(AVFilterLink *outlink)
 {
     AVFilterContext *ctx = outlink->src;
     int id = outlink == ctx->outputs[1];
 
     outlink->sample_rate = ctx->inputs[id]->sample_rate;
     outlink->time_base   = ctx->inputs[id]->time_base;
     return 0;
 }
 
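 /* Send the oldest frame queued for output out_id downstream, updating the
    buffer count, sample count and end timestamp variables for that stream. */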
 static int send_out(AVFilterContext *ctx, int out_id)
 {
     AStreamSyncContext *as = ctx->priv;
     struct buf_queue *queue = &as->queue[out_id];
     AVFrame *buf = queue->buf[queue->tail];
     int ret;
 
     queue->buf[queue->tail] = NULL;
     as->var_values[VAR_B1 + out_id]++;
     as->var_values[VAR_S1 + out_id] += buf->nb_samples;
     if (buf->pts != AV_NOPTS_VALUE)
         as->var_values[VAR_T1 + out_id] =
             av_q2d(ctx->outputs[out_id]->time_base) * buf->pts;
     as->var_values[VAR_T1 + out_id] += buf->nb_samples /
                                    (double)ctx->inputs[out_id]->sample_rate;
     ret = ff_filter_frame(ctx->outputs[out_id], buf);
     queue->nb--;
     queue->tail = (queue->tail + 1) % QUEUE_SIZE;
     if (as->req[out_id])
         as->req[out_id]--;
     return ret;
 }
 
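 /* While frames are queued on the currently selected output, send them and
    re-evaluate the expression after each one to pick the next output; if a
    queue has filled up, flush one frame from it so it cannot overflow. */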
 static void send_next(AVFilterContext *ctx)
 {
     AStreamSyncContext *as = ctx->priv;
     int i;
 
     while (1) {
         if (!as->queue[as->next_out].nb)
             break;
         send_out(ctx, as->next_out);
         if (!as->eof)
             as->next_out = av_expr_eval(as->expr, as->var_values, NULL) >= 0;
     }
     for (i = 0; i < 2; i++)
         if (as->queue[i].nb == QUEUE_SIZE)
             send_out(ctx, i);
 }
 
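 /* Serve a frame request on one output: send queued frames or pull more from
    the inputs until a frame has gone out on this output or its stream reaches
    EOF.  The EOF bit is set before each input request and cleared again by
    filter_frame() if a frame actually arrives. */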
 static int request_frame(AVFilterLink *outlink)
 {
     AVFilterContext *ctx = outlink->src;
     AStreamSyncContext *as = ctx->priv;
     int id = outlink == ctx->outputs[1];
 
     as->req[id]++;
     while (as->req[id] && !(as->eof & (1 << id))) {
         if (as->queue[as->next_out].nb) {
             send_next(ctx);
         } else {
             as->eof |= 1 << as->next_out;
             ff_request_frame(ctx->inputs[as->next_out]);
             if (as->eof & (1 << as->next_out))
                 as->next_out = !as->next_out;
         }
     }
     return 0;
 }
 
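 /* Queue the incoming frame at the tail of its stream's queue, clear that
    stream's EOF flag and try to send pending frames. */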
 static int filter_frame(AVFilterLink *inlink, AVFrame *insamples)
 {
     AVFilterContext *ctx = inlink->dst;
     AStreamSyncContext *as = ctx->priv;
     int id = inlink == ctx->inputs[1];
 
     as->queue[id].buf[(as->queue[id].tail + as->queue[id].nb++) % QUEUE_SIZE] =
         insamples;
     as->eof &= ~(1 << id);
     send_next(ctx);
     return 0;
 }
 
 static av_cold void uninit(AVFilterContext *ctx)
 {
     AStreamSyncContext *as = ctx->priv;
 
     av_expr_free(as->expr);
     as->expr = NULL;
 }
 
 static const AVFilterPad astreamsync_inputs[] = {
     {
         .name         = "in1",
         .type         = AVMEDIA_TYPE_AUDIO,
         .filter_frame = filter_frame,
     },{
         .name         = "in2",
         .type         = AVMEDIA_TYPE_AUDIO,
         .filter_frame = filter_frame,
     },
     { NULL }
 };
 
 static const AVFilterPad astreamsync_outputs[] = {
     {
         .name          = "out1",
         .type          = AVMEDIA_TYPE_AUDIO,
         .config_props  = config_output,
         .request_frame = request_frame,
     },{
         .name          = "out2",
         .type          = AVMEDIA_TYPE_AUDIO,
         .config_props  = config_output,
         .request_frame = request_frame,
     },
     { NULL }
 };
 
 AVFilter avfilter_af_astreamsync = {
     .name          = "astreamsync",
     .description   = NULL_IF_CONFIG_SMALL("Copy two streams of audio data "
                                           "in a configurable order."),
     .priv_size     = sizeof(AStreamSyncContext),
     .init          = init,
     .uninit        = uninit,
     .query_formats = query_formats,
     .inputs        = astreamsync_inputs,
     .outputs       = astreamsync_outputs,
     .priv_class    = &astreamsync_class,
 };