... | ... |
@@ -221,7 +221,7 @@ OBJS-$(CONFIG_LIMITER_FILTER)                += vf_limiter.o
 OBJS-$(CONFIG_LOOP_FILTER)                   += f_loop.o
 OBJS-$(CONFIG_LUMAKEY_FILTER)                += vf_lumakey.o
 OBJS-$(CONFIG_LUT_FILTER)                    += vf_lut.o
-OBJS-$(CONFIG_LUT2_FILTER)                   += vf_lut2.o framesync.o
+OBJS-$(CONFIG_LUT2_FILTER)                   += vf_lut2.o framesync2.o
 OBJS-$(CONFIG_LUT3D_FILTER)                  += vf_lut3d.o
 OBJS-$(CONFIG_LUTRGB_FILTER)                 += vf_lut.o
 OBJS-$(CONFIG_LUTYUV_FILTER)                 += vf_lut.o
... | ... |
@@ -28,7 +28,7 @@
 #include "formats.h"
 #include "internal.h"
 #include "video.h"
-#include "framesync.h"
+#include "framesync2.h"
 
 static const char *const var_names[] = {
     "w",        ///< width of the input video
... | ... |
@@ -206,8 +206,8 @@ static int process_frame(FFFrameSync *fs)
     AVFrame *out, *srcx, *srcy;
     int ret;
 
-    if ((ret = ff_framesync_get_frame(&s->fs, 0, &srcx, 0)) < 0 ||
-        (ret = ff_framesync_get_frame(&s->fs, 1, &srcy, 0)) < 0)
+    if ((ret = ff_framesync2_get_frame(&s->fs, 0, &srcx, 0)) < 0 ||
+        (ret = ff_framesync2_get_frame(&s->fs, 1, &srcy, 0)) < 0)
         return ret;
 
     if (ctx->is_disabled) {
... | ... |
@@ -266,7 +266,7 @@ static int config_output(AVFilterLink *outlink)
     outlink->sample_aspect_ratio = srcx->sample_aspect_ratio;
     outlink->frame_rate = srcx->frame_rate;
 
-    if ((ret = ff_framesync_init(&s->fs, ctx, 2)) < 0)
+    if ((ret = ff_framesync2_init(&s->fs, ctx, 2)) < 0)
         return ret;
 
     in = s->fs.in;
... | ... |
@@ -323,32 +323,24 @@ static int config_output(AVFilterLink *outlink)
         }
     }
 
-    return ff_framesync_configure(&s->fs);
+    return ff_framesync2_configure(&s->fs);
 }
 
-static int filter_frame(AVFilterLink *inlink, AVFrame *buf)
+static int activate(AVFilterContext *ctx)
 {
-    LUT2Context *s = inlink->dst->priv;
-    return ff_framesync_filter_frame(&s->fs, inlink, buf);
-}
-
-static int request_frame(AVFilterLink *outlink)
-{
-    LUT2Context *s = outlink->src->priv;
-    return ff_framesync_request_frame(&s->fs, outlink);
+    LUT2Context *s = ctx->priv;
+    return ff_framesync2_activate(&s->fs);
 }
 
 static const AVFilterPad inputs[] = {
     {
         .name         = "srcx",
         .type         = AVMEDIA_TYPE_VIDEO,
-        .filter_frame = filter_frame,
         .config_props = config_inputx,
     },
     {
         .name         = "srcy",
         .type         = AVMEDIA_TYPE_VIDEO,
-        .filter_frame = filter_frame,
         .config_props = config_inputy,
     },
     { NULL }
... | ... |
@@ -359,7 +351,6 @@ static const AVFilterPad outputs[] = {
         .name          = "default",
         .type          = AVMEDIA_TYPE_VIDEO,
         .config_props  = config_output,
-        .request_frame = request_frame,
     },
     { NULL }
 };
... | ... |
@@ -373,6 +364,7 @@ AVFilter ff_vf_lut2 = {
     .priv_class    = &lut2_class,
     .uninit        = uninit,
     .query_formats = query_formats,
+    .activate      = activate,
     .inputs        = inputs,
     .outputs       = outputs,
     .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,