Browse code

Merge commit 'ad71d3276fef0ee7e791e62bbfe9c4e540047417'

* commit 'ad71d3276fef0ee7e791e62bbfe9c4e540047417':
lavfi: add a QSV deinterlacing filter

Minor fixup for lavfi differences.

Merged-by: Mark Thompson <sw@jkqxz.net>

Mark Thompson authored on 2017/03/13 01:55:32
Showing 6 changed files
... ...
@@ -26,7 +26,7 @@ version <next>:
26 26
 - native Opus encoder
27 27
 - ScreenPressor decoder
28 28
 - incomplete ClearVideo decoder
29
-- Intel QSV video scaling filter
29
+- Intel QSV video scaling and deinterlacing filters
30 30
 
31 31
 version 3.2:
32 32
 - libopenmpt demuxer
... ...
@@ -3083,6 +3083,7 @@ bs2b_filter_deps="libbs2b"
3083 3083
 colormatrix_filter_deps="gpl"
3084 3084
 cover_rect_filter_deps="avcodec avformat gpl"
3085 3085
 cropdetect_filter_deps="gpl"
3086
+deinterlace_qsv_filter_deps="libmfx"
3086 3087
 deinterlace_vaapi_filter_deps="vaapi"
3087 3088
 delogo_filter_deps="gpl"
3088 3089
 deshake_filter_select="pixelutils"
... ...
@@ -156,6 +156,7 @@ OBJS-$(CONFIG_DCTDNOIZ_FILTER)               += vf_dctdnoiz.o
156 156
 OBJS-$(CONFIG_DEBAND_FILTER)                 += vf_deband.o
157 157
 OBJS-$(CONFIG_DECIMATE_FILTER)               += vf_decimate.o
158 158
 OBJS-$(CONFIG_DEFLATE_FILTER)                += vf_neighbor.o
159
+OBJS-$(CONFIG_DEINTERLACE_QSV_FILTER)        += vf_deinterlace_qsv.o
159 160
 OBJS-$(CONFIG_DEINTERLACE_VAAPI_FILTER)      += vf_deinterlace_vaapi.o
160 161
 OBJS-$(CONFIG_DEJUDDER_FILTER)               += vf_dejudder.o
161 162
 OBJS-$(CONFIG_DELOGO_FILTER)                 += vf_delogo.o
... ...
@@ -167,6 +167,7 @@ static void register_all(void)
167 167
     REGISTER_FILTER(DEBAND,         deband,         vf);
168 168
     REGISTER_FILTER(DECIMATE,       decimate,       vf);
169 169
     REGISTER_FILTER(DEFLATE,        deflate,        vf);
170
+    REGISTER_FILTER(DEINTERLACE_QSV,deinterlace_qsv,vf);
170 171
     REGISTER_FILTER(DEINTERLACE_VAAPI, deinterlace_vaapi, vf);
171 172
     REGISTER_FILTER(DEJUDDER,       dejudder,       vf);
172 173
     REGISTER_FILTER(DELOGO,         delogo,         vf);
... ...
@@ -30,7 +30,7 @@
30 30
 #include "libavutil/version.h"
31 31
 
32 32
 #define LIBAVFILTER_VERSION_MAJOR   6
33
-#define LIBAVFILTER_VERSION_MINOR  75
33
+#define LIBAVFILTER_VERSION_MINOR  76
34 34
 #define LIBAVFILTER_VERSION_MICRO 100
35 35
 
36 36
 #define LIBAVFILTER_VERSION_INT AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, \
37 37
new file mode 100644
... ...
@@ -0,0 +1,575 @@
0
+/*
1
+ * This file is part of FFmpeg.
2
+ *
3
+ * FFmpeg is free software; you can redistribute it and/or
4
+ * modify it under the terms of the GNU Lesser General Public
5
+ * License as published by the Free Software Foundation; either
6
+ * version 2.1 of the License, or (at your option) any later version.
7
+ *
8
+ * FFmpeg is distributed in the hope that it will be useful,
9
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
11
+ * Lesser General Public License for more details.
12
+ *
13
+ * You should have received a copy of the GNU Lesser General Public
14
+ * License along with FFmpeg; if not, write to the Free Software
15
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
+ */
17
+
18
+/**
19
+ * @file
20
+ * deinterlace video filter - QSV
21
+ */
22
+
23
+#include <mfx/mfxvideo.h>
24
+
25
+#include <stdio.h>
26
+#include <string.h>
27
+
28
+#include "libavutil/avstring.h"
29
+#include "libavutil/common.h"
30
+#include "libavutil/hwcontext.h"
31
+#include "libavutil/hwcontext_qsv.h"
32
+#include "libavutil/internal.h"
33
+#include "libavutil/mathematics.h"
34
+#include "libavutil/opt.h"
35
+#include "libavutil/pixdesc.h"
36
+#include "libavutil/time.h"
37
+
38
+#include "avfilter.h"
39
+#include "formats.h"
40
+#include "internal.h"
41
+#include "video.h"
42
+
43
/* Internal return codes of process_frame(), driving the submit/drain
 * loop in qsvdeint_filter_frame(). */
enum {
    QSVDEINT_MORE_OUTPUT = 1,
    QSVDEINT_MORE_INPUT,
};
47
+
48
/* An input frame tracked by the filter: the owned AVFrame plus a copy of
 * its mfxFrameSurface1 descriptor handed to the VPP.  Entries live in a
 * linked list so the underlying surface stays alive while the hardware
 * still holds a lock on it. */
typedef struct QSVFrame {
    AVFrame *frame;            // owned reference, freed once the surface unlocks
    mfxFrameSurface1 surface;  // per-submission copy with PicStruct/timestamps set
    int used;                  // nonzero while the entry is in flight

    struct QSVFrame *next;
} QSVFrame;
55
+
56
typedef struct QSVDeintContext {
    const AVClass *class;

    AVBufferRef *hw_frames_ctx;   // ref to the input link's AVHWFramesContext
    /* a clone of the main session, used internally for deinterlacing */
    mfxSession   session;

    /* video-memory mode: MemIds of the pool surfaces, served by frame_alloc() */
    mfxMemId *mem_ids;
    int    nb_mem_ids;

    /* opaque-memory mode: pointers to the pool surfaces */
    mfxFrameSurface1 **surface_ptrs;
    int             nb_surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer            *ext_buffers[1];

    /* list of submitted input frames (see QSVFrame) */
    QSVFrame *work_frames;

    /* pts of the previous output frame, used to keep timestamps strictly
     * increasing after the time-base rescale */
    int64_t last_pts;

    int eof;  // NOTE(review): set nowhere in this file — presumably reserved; confirm
} QSVDeintContext;
78
+
79
+static void qsvdeint_uninit(AVFilterContext *ctx)
80
+{
81
+    QSVDeintContext *s = ctx->priv;
82
+    QSVFrame *cur;
83
+
84
+    if (s->session) {
85
+        MFXClose(s->session);
86
+        s->session = NULL;
87
+    }
88
+    av_buffer_unref(&s->hw_frames_ctx);
89
+
90
+    cur = s->work_frames;
91
+    while (cur) {
92
+        s->work_frames = cur->next;
93
+        av_frame_free(&cur->frame);
94
+        av_freep(&cur);
95
+        cur = s->work_frames;
96
+    }
97
+
98
+    av_freep(&s->mem_ids);
99
+    s->nb_mem_ids = 0;
100
+
101
+    av_freep(&s->surface_ptrs);
102
+    s->nb_surface_ptrs = 0;
103
+}
104
+
105
+static int qsvdeint_query_formats(AVFilterContext *ctx)
106
+{
107
+    static const enum AVPixelFormat pixel_formats[] = {
108
+        AV_PIX_FMT_QSV, AV_PIX_FMT_NONE,
109
+    };
110
+    AVFilterFormats *pix_fmts  = ff_make_format_list(pixel_formats);
111
+    int ret;
112
+
113
+    if ((ret = ff_set_common_formats(ctx, pix_fmts)) < 0)
114
+        return ret;
115
+
116
+    return 0;
117
+}
118
+
119
/* mfxFrameAllocator.Alloc callback: instead of allocating anything new,
 * hand the VPP the MemIds of the pre-existing frame pool.  Only external
 * video-memory requests for the VPP input/output are supported. */
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    AVFilterContext *ctx = pthis;
    QSVDeintContext   *s = ctx->priv;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    resp->mids           = s->mem_ids;
    resp->NumFrameActual = s->nb_mem_ids;

    return MFX_ERR_NONE;
}
135
+
136
/* mfxFrameAllocator.Free callback: nothing to release — the surfaces
 * belong to the hw frames context, not to this allocator. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
140
+
141
/* mfxFrameAllocator.Lock callback: CPU mapping of video memory is not
 * supported by this filter. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
145
+
146
/* mfxFrameAllocator.Unlock callback: counterpart of frame_lock(), also
 * unsupported. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
150
+
151
/* mfxFrameAllocator.GetHDL callback: the MemId stored for each pool
 * surface is already the native handle, so return it as-is. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
156
+
157
/* Device handle types probed on the master session, in order of
 * preference (VA-API, D3D9, D3D11). */
static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};
162
+
163
/* Create and configure the "slave" MFX session and its VPP component
 * that performs the actual deinterlacing.  The slave session mirrors the
 * implementation, API version and device handle of the master session
 * found in the device context, and shares the frame pool of the input
 * hw frames context — either as opaque surfaces or as video memory with
 * a pass-through allocator.
 *
 * Returns 0 on success or a negative AVERROR code.  On failure, cleanup
 * of any partially-created state is left to qsvdeint_uninit(). */
static int init_out_session(AVFilterContext *ctx)
{

    QSVDeintContext                  *s = ctx->priv;
    AVHWFramesContext    *hw_frames_ctx = (AVHWFramesContext*)s->hw_frames_ctx->data;
    AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
    AVQSVDeviceContext    *device_hwctx = hw_frames_ctx->device_ctx->hwctx;

    int opaque = !!(hw_frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxHDL handle = NULL;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    mfxVideoParam par;
    mfxStatus err;
    int i;

    /* extract the properties of the "master" session given to us */
    err = MFXQueryIMPL(device_hwctx->session, &impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(device_hwctx->session, &ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    /* probe for whichever device handle the master session exposes;
     * handle stays NULL (and handle_type unset) if none is found */
    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        err = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (err == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    /* create a "slave" session with those same properties, to be used for
     * actual deinterlacing */
    err = MFXInit(impl, &ver, &s->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a session for deinterlacing\n");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        err = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    if (opaque) {
        /* opaque memory: pass the pool's surface pointers to the VPP via
         * the opaque-surface-allocation ext buffer */
        s->surface_ptrs = av_mallocz_array(hw_frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);
        for (i = 0; i < hw_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = hw_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs = hw_frames_hwctx->nb_surfaces;

        s->opaque_alloc.In.Surfaces   = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs;
        s->opaque_alloc.In.Type       = hw_frames_hwctx->frame_type;

        /* input and output share the same surface pool */
        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;

        par.ExtParam    = s->ext_buffers;
        par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);

        par.IOPattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY | MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
    } else {
        /* video memory: install a minimal allocator that just hands back
         * the MemIds of the existing pool surfaces */
        mfxFrameAllocator frame_allocator = {
            .pthis  = ctx,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        s->mem_ids = av_mallocz_array(hw_frames_hwctx->nb_surfaces,
                                      sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);
        for (i = 0; i < hw_frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = hw_frames_hwctx->surfaces[i].Data.MemId;
        s->nb_mem_ids = hw_frames_hwctx->nb_surfaces;

        err = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;

        par.IOPattern = MFX_IOPATTERN_IN_VIDEO_MEMORY | MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    }

    par.AsyncDepth = 1;    // TODO async

    /* base the VPP frame info on the pool's surface description, then
     * override the crop and frame-rate fields from the filter links */
    par.vpp.In = hw_frames_hwctx->surfaces[0].Info;

    par.vpp.In.CropW = ctx->inputs[0]->w;
    par.vpp.In.CropH = ctx->inputs[0]->h;

    /* fall back on the link time base when no frame rate is set */
    if (ctx->inputs[0]->frame_rate.num) {
        par.vpp.In.FrameRateExtN = ctx->inputs[0]->frame_rate.num;
        par.vpp.In.FrameRateExtD = ctx->inputs[0]->frame_rate.den;
    } else {
        par.vpp.In.FrameRateExtN = ctx->inputs[0]->time_base.num;
        par.vpp.In.FrameRateExtD = ctx->inputs[0]->time_base.den;
    }

    par.vpp.Out = par.vpp.In;

    if (ctx->outputs[0]->frame_rate.num) {
        par.vpp.Out.FrameRateExtN = ctx->outputs[0]->frame_rate.num;
        par.vpp.Out.FrameRateExtD = ctx->outputs[0]->frame_rate.den;
    } else {
        par.vpp.Out.FrameRateExtN = ctx->outputs[0]->time_base.num;
        par.vpp.Out.FrameRateExtD = ctx->outputs[0]->time_base.den;
    }

    err = MFXVideoVPP_Init(s->session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error opening the VPP for deinterlacing: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    return 0;
}
296
+
297
/* Output link config callback: double the frame rate / halve the time
 * base (one output frame per input field), take a reference to the input
 * hw frames context and initialize the deinterlacing session. */
static int qsvdeint_config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    QSVDeintContext  *s = ctx->priv;
    int ret;

    /* drop any state from a previous configuration */
    qsvdeint_uninit(ctx);

    s->last_pts = AV_NOPTS_VALUE;
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational){ 2, 1 });
    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational){ 1, 2 });

    /* check that we have a hw context */
    if (!inlink->hw_frames_ctx) {
        av_log(ctx, AV_LOG_ERROR, "No hw context provided on input\n");
        return AVERROR(EINVAL);
    }

    s->hw_frames_ctx = av_buffer_ref(inlink->hw_frames_ctx);
    if (!s->hw_frames_ctx)
        return AVERROR(ENOMEM);

    /* output frames come from the same pool as the input */
    av_buffer_unref(&outlink->hw_frames_ctx);
    outlink->hw_frames_ctx = av_buffer_ref(inlink->hw_frames_ctx);
    if (!outlink->hw_frames_ctx) {
        qsvdeint_uninit(ctx);
        return AVERROR(ENOMEM);
    }

    ret = init_out_session(ctx);
    if (ret < 0)
        return ret;


    return 0;
}
336
+
337
+static void clear_unused_frames(QSVDeintContext *s)
338
+{
339
+    QSVFrame *cur = s->work_frames;
340
+    while (cur) {
341
+        if (!cur->surface.Data.Locked) {
342
+            av_frame_free(&cur->frame);
343
+            cur->used = 0;
344
+        }
345
+        cur = cur->next;
346
+    }
347
+}
348
+
349
+static int get_free_frame(QSVDeintContext *s, QSVFrame **f)
350
+{
351
+    QSVFrame *frame, **last;
352
+
353
+    clear_unused_frames(s);
354
+
355
+    frame = s->work_frames;
356
+    last  = &s->work_frames;
357
+    while (frame) {
358
+        if (!frame->used) {
359
+            *f = frame;
360
+            return 0;
361
+        }
362
+
363
+        last  = &frame->next;
364
+        frame = frame->next;
365
+    }
366
+
367
+    frame = av_mallocz(sizeof(*frame));
368
+    if (!frame)
369
+        return AVERROR(ENOMEM);
370
+    *last = frame;
371
+    *f    = frame;
372
+
373
+    return 0;
374
+}
375
+
376
/* Take ownership of an input AVFrame and wrap it in a work-list entry
 * whose mfxFrameSurface1 copy carries the crop, interlacing, frame-rate
 * and timestamp metadata the VPP needs.  On success *surface points into
 * the entry; the AVFrame is freed later, once the VPP unlocks the
 * surface (see clear_unused_frames()). */
static int submit_frame(AVFilterContext *ctx, AVFrame *frame,
                        mfxFrameSurface1 **surface)
{
    QSVDeintContext *s = ctx->priv;
    QSVFrame *qf;
    int ret;

    ret = get_free_frame(s, &qf);
    if (ret < 0)
        return ret;

    qf->frame = frame;

    /* data[3] of an AV_PIX_FMT_QSV frame holds its mfxFrameSurface1 */
    qf->surface = *(mfxFrameSurface1*)qf->frame->data[3];

    qf->surface.Data.Locked = 0;
    qf->surface.Info.CropW  = qf->frame->width;
    qf->surface.Info.CropH  = qf->frame->height;

    qf->surface.Info.PicStruct = !qf->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
                                 (qf->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                           MFX_PICSTRUCT_FIELD_BFF);
    /* map libavutil repeat_pict values onto MFX PicStruct flags */
    if (qf->frame->repeat_pict == 1)
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qf->frame->repeat_pict == 2)
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qf->frame->repeat_pict == 4)
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    /* fall back on the link time base when no frame rate is set */
    if (ctx->inputs[0]->frame_rate.num) {
        qf->surface.Info.FrameRateExtN = ctx->inputs[0]->frame_rate.num;
        qf->surface.Info.FrameRateExtD = ctx->inputs[0]->frame_rate.den;
    } else {
        qf->surface.Info.FrameRateExtN = ctx->inputs[0]->time_base.num;
        qf->surface.Info.FrameRateExtD = ctx->inputs[0]->time_base.den;
    }

    /* MFX timestamps run on a 90kHz clock */
    qf->surface.Data.TimeStamp = av_rescale_q(qf->frame->pts,
                                              ctx->inputs[0]->time_base,
                                              (AVRational){1, 90000});

    *surface = &qf->surface;
    qf->used = 1;

    return 0;
}
422
+
423
/* Run one VPP deinterlacing step on surf_in and push the result
 * downstream.
 *
 * Returns 0 on success, QSVDEINT_MORE_INPUT when the VPP needs another
 * input before producing output, QSVDEINT_MORE_OUTPUT when more output
 * can be drained from the same input, or a negative AVERROR code. */
static int process_frame(AVFilterContext *ctx, const AVFrame *in,
                         mfxFrameSurface1 *surf_in)
{
    QSVDeintContext    *s = ctx->priv;
    AVFilterLink  *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];

    AVFrame *out;
    mfxFrameSurface1 *surf_out;
    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret, again = 0;

    out = av_frame_alloc();
    if (!out)
        return AVERROR(ENOMEM);

    /* get a destination surface from the shared pool */
    ret = av_hwframe_get_buffer(s->hw_frames_ctx, out, 0);
    if (ret < 0)
        goto fail;

    surf_out = (mfxFrameSurface1*)out->data[3];
    surf_out->Info.CropW     = outlink->w;
    surf_out->Info.CropH     = outlink->h;
    surf_out->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

    /* retry (with a minimal sleep) while the device is busy */
    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session, surf_in, surf_out,
                                           NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err == MFX_ERR_MORE_DATA) {
        av_frame_free(&out);
        return QSVDEINT_MORE_INPUT;
    }

    /* MFX_ERR_MORE_SURFACE means this input yields further output frames */
    if ((err < 0 && err != MFX_ERR_MORE_SURFACE) || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error during deinterlacing: %d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }
    if (err == MFX_ERR_MORE_SURFACE)
        again = 1;

    /* wait for the asynchronous operation to finish */
    do {
        err = MFXVideoCORE_SyncOperation(s->session, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    ret = av_frame_copy_props(out, in);
    if (ret < 0)
        goto fail;

    out->width            = outlink->w;
    out->height           = outlink->h;
    out->interlaced_frame = 0;

    /* keep pts strictly increasing if rescaling to the halved time base
     * collapsed two outputs onto the same timestamp */
    out->pts = av_rescale_q(out->pts, inlink->time_base, outlink->time_base);
    if (out->pts == s->last_pts)
        out->pts++;
    s->last_pts = out->pts;

    /* ff_filter_frame() owns 'out' from here on, even on failure */
    ret = ff_filter_frame(outlink, out);
    if (ret < 0)
        return ret;

    return again ? QSVDEINT_MORE_OUTPUT : 0;
fail:
    av_frame_free(&out);
    return ret;
}
500
+
501
/* Input pad callback: submit the incoming frame to the VPP and drain all
 * output frames it can produce.  On successful submission, ownership of
 * 'in' passes to the work-frame list (submit_frame()), so it is only
 * freed here when submission itself fails. */
static int qsvdeint_filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;

    mfxFrameSurface1 *surf_in;
    int ret;

    ret = submit_frame(ctx, in, &surf_in);
    if (ret < 0) {
        av_frame_free(&in);
        return ret;
    }

    do {
        ret = process_frame(ctx, in, surf_in);
        if (ret < 0)
            return ret;
    } while (ret == QSVDEINT_MORE_OUTPUT);

    return 0;
}
522
+
523
+static int qsvdeint_request_frame(AVFilterLink *outlink)
524
+{
525
+    AVFilterContext *ctx = outlink->src;
526
+
527
+    return ff_request_frame(ctx->inputs[0]);
528
+}
529
+
530
#define OFFSET(x) offsetof(QSVDeintContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM
/* no user-visible options yet; OFFSET/FLAGS are kept for future entries */
static const AVOption options[] = {
    { NULL },
};

static const AVClass qsvdeint_class = {
    .class_name = "deinterlace_qsv",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVFilterPad qsvdeint_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = qsvdeint_filter_frame,
    },
    { NULL }
};

static const AVFilterPad qsvdeint_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = qsvdeint_config_props,
        .request_frame = qsvdeint_request_frame,
    },
    { NULL }
};

/* Filter definition registered as "deinterlace_qsv" in allfilters.c. */
AVFilter ff_vf_deinterlace_qsv = {
    .name      = "deinterlace_qsv",
    .description = NULL_IF_CONFIG_SMALL("QuickSync video deinterlacing"),

    .uninit        = qsvdeint_uninit,
    .query_formats = qsvdeint_query_formats,

    .priv_size = sizeof(QSVDeintContext),
    .priv_class = &qsvdeint_class,

    .inputs    = qsvdeint_inputs,
    .outputs   = qsvdeint_outputs,
};