hwcontext: add a VDPAU implementation

Anton Khirnov authored on 2015/12/07 23:04:57
Showing 6 changed files
... ...
@@ -17,6 +17,8 @@ API changes, most recent first:
   xxxxxxx buffer.h - Add av_buffer_pool_init2().
   xxxxxxx hwcontext.h - Add a new installed header hwcontext.h with a new API
                         for handling hwaccel frames.
+  xxxxxxx hwcontext_vdpau.h - Add a new installed header hwcontext_vdpau.h with
+                              VDPAU-specific hwcontext definitions.
 
 2016-xx-xx - xxxxxxx - lavf 57.3.0 - avformat.h
   Add AVFormatContext.opaque, io_open and io_close, allowing custom IO
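
The hwcontext.h and hwcontext_vdpau.h entries above only name the new API. As a rough usage sketch (not part of this commit), the snippet below allocates a VDPAU device context through the new calls and fills in the AVVDPAUDeviceContext added by this patch. Bootstrapping the VdpDevice with XOpenDisplay() and vdp_device_create_x11() is an assumption about the caller's environment, and error cleanup is abbreviated.

    #include <X11/Xlib.h>
    #include <vdpau/vdpau_x11.h>

    #include "libavutil/buffer.h"
    #include "libavutil/hwcontext.h"
    #include "libavutil/hwcontext_vdpau.h"

    /* Illustrative helper: returns an initialised AV_HWDEVICE_TYPE_VDPAU device
     * reference, or NULL on error (resources leak on the error paths for brevity). */
    static AVBufferRef *vdpau_device_create_sketch(void)
    {
        VdpDevice             device;
        VdpGetProcAddress    *get_proc_address;
        AVBufferRef          *device_ref;
        AVHWDeviceContext    *ctx;
        AVVDPAUDeviceContext *hwctx;
        Display              *dpy = XOpenDisplay(NULL);

        if (!dpy ||
            vdp_device_create_x11(dpy, DefaultScreen(dpy),
                                  &device, &get_proc_address) != VDP_STATUS_OK)
            return NULL;

        device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VDPAU);
        if (!device_ref)
            return NULL;

        ctx   = (AVHWDeviceContext*)device_ref->data;
        hwctx = ctx->hwctx;
        hwctx->device           = device;
        hwctx->get_proc_address = get_proc_address;

        /* Runs the VDPAU backend's device_init callback. */
        if (av_hwdevice_ctx_init(device_ref) < 0) {
            av_buffer_unref(&device_ref);
            return NULL;
        }
        return device_ref;
    }
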
... ...
@@ -24,6 +24,7 @@ HEADERS = adler32.h                                                     \
           frame.h                                                       \
           hmac.h                                                        \
           hwcontext.h                                                   \
+          hwcontext_vdpau.h                                             \
           imgutils.h                                                    \
           intfloat.h                                                    \
           intreadwrite.h                                                \
... ...
@@ -105,6 +106,7 @@ OBJS = adler32.o                                                        \
        xtea.o                                                           \
 
 OBJS-$(CONFIG_LZO)                      += lzo.o
+OBJS-$(CONFIG_VDPAU)                    += hwcontext_vdpau.o
 
 OBJS += $(COMPAT_OBJS:%=../compat/%)
 
... ...
@@ -29,6 +29,9 @@
 #include "pixfmt.h"
 
 static const HWContextType *hw_table[] = {
+#if CONFIG_VDPAU
+    &ff_hwcontext_type_vdpau,
+#endif
     NULL,
 };
 
... ...
@@ -86,4 +86,6 @@ struct AVHWFramesInternal {
     AVBufferPool *pool_internal;
 };
 
+extern const HWContextType ff_hwcontext_type_vdpau;
+
 #endif /* AVUTIL_HWCONTEXT_INTERNAL_H */
new file mode 100644
... ...
@@ -0,0 +1,408 @@
+/*
+ * This file is part of Libav.
+ *
+ * Libav is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * Libav is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with Libav; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include <stdint.h>
+#include <string.h>
+
+#include <vdpau/vdpau.h>
+
+#include "buffer.h"
+#include "common.h"
+#include "hwcontext.h"
+#include "hwcontext_internal.h"
+#include "hwcontext_vdpau.h"
+#include "mem.h"
+#include "pixfmt.h"
+#include "pixdesc.h"
+
+typedef struct VDPAUDeviceContext {
+    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
+    VdpVideoSurfaceGetBitsYCbCr                     *get_data;
+    VdpVideoSurfacePutBitsYCbCr                     *put_data;
+    VdpVideoSurfaceCreate                           *surf_create;
+    VdpVideoSurfaceDestroy                          *surf_destroy;
+
+    enum AVPixelFormat *pix_fmts[3];
+    int              nb_pix_fmts[3];
+} VDPAUDeviceContext;
+
+typedef struct VDPAUFramesContext {
+    VdpVideoSurfaceGetBitsYCbCr *get_data;
+    VdpVideoSurfacePutBitsYCbCr *put_data;
+    VdpChromaType chroma_type;
+    int chroma_idx;
+
+    const enum AVPixelFormat *pix_fmts;
+    int                       nb_pix_fmts;
+} VDPAUFramesContext;
+
+typedef struct VDPAUPixFmtMap {
+    VdpYCbCrFormat vdpau_fmt;
+    enum AVPixelFormat pix_fmt;
+} VDPAUPixFmtMap;
+
+static const VDPAUPixFmtMap pix_fmts_420[] = {
+    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
+    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
+    { 0,                     AV_PIX_FMT_NONE,   },
+};
+
+static const VDPAUPixFmtMap pix_fmts_422[] = {
+    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
+    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
+    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
+    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
+    { 0,                     AV_PIX_FMT_NONE,   },
+};
+
+static const VDPAUPixFmtMap pix_fmts_444[] = {
+    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
+    { 0,                     AV_PIX_FMT_NONE,   },
+};
+
+static const struct {
+    VdpChromaType chroma_type;
+    const VDPAUPixFmtMap *map;
+} vdpau_pix_fmts[] = {
+    { VDP_CHROMA_TYPE_420, pix_fmts_420 },
+    { VDP_CHROMA_TYPE_422, pix_fmts_422 },
+    { VDP_CHROMA_TYPE_444, pix_fmts_444 },
+};
+
+static int count_pixfmts(const VDPAUPixFmtMap *map)
+{
+    int count = 0;
+    while (map->pix_fmt != AV_PIX_FMT_NONE) {
+        map++;
+        count++;
+    }
+    return count;
+}
+
+static int vdpau_init_pixfmts(AVHWDeviceContext *ctx)
+{
+    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
+    VDPAUDeviceContext    *priv = ctx->internal->priv;
+    int i;
+
+    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
+        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
+        int nb_pix_fmts;
+
+        nb_pix_fmts = count_pixfmts(map);
+        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
+        if (!priv->pix_fmts[i])
+            return AVERROR(ENOMEM);
+
+        nb_pix_fmts = 0;
+        while (map->pix_fmt != AV_PIX_FMT_NONE) {
+            VdpBool supported;
+            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
+                                                    map->vdpau_fmt, &supported);
+            if (err == VDP_STATUS_OK && supported)
+                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
+            map++;
+        }
+        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
+        priv->nb_pix_fmts[i]             = nb_pix_fmts;
+    }
+
+    return 0;
+}
+
+static int vdpau_device_init(AVHWDeviceContext *ctx)
+{
+    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
+    VDPAUDeviceContext   *priv  = ctx->internal->priv;
+    VdpStatus             err;
+    int                   ret;
+
+#define GET_CALLBACK(id, result)                                                \
+do {                                                                            \
+    void *tmp;                                                                  \
+    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                     \
+    if (err != VDP_STATUS_OK) {                                                 \
+        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");     \
+        return AVERROR_UNKNOWN;                                                 \
+    }                                                                           \
+    priv->result = tmp;                                                         \
+} while (0)
+
+    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
+                 get_transfer_caps);
+    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, get_data);
+    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, put_data);
+    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           surf_create);
+    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          surf_destroy);
+
+    ret = vdpau_init_pixfmts(ctx);
+    if (ret < 0) {
+        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
+        return ret;
+    }
+
+    return 0;
+}
+
+static void vdpau_device_uninit(AVHWDeviceContext *ctx)
+{
+    VDPAUDeviceContext *priv = ctx->internal->priv;
+    int i;
+
+    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
+        av_freep(&priv->pix_fmts[i]);
+}
+
+static void vdpau_buffer_free(void *opaque, uint8_t *data)
+{
+    AVHWFramesContext          *ctx = opaque;
+    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
+    VdpVideoSurface            surf = (VdpVideoSurface)(uintptr_t)data;
+
+    device_priv->surf_destroy(surf);
+}
+
+static AVBufferRef *vdpau_pool_alloc(void *opaque, int size)
+{
+    AVHWFramesContext             *ctx = opaque;
+    VDPAUFramesContext           *priv = ctx->internal->priv;
+    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
+    VDPAUDeviceContext    *device_priv = ctx->device_ctx->internal->priv;
+
+    AVBufferRef *ret;
+    VdpVideoSurface surf;
+    VdpStatus err;
+
+    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
+                                   ctx->width, ctx->height, &surf);
+    if (err != VDP_STATUS_OK) {
+        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
+        return NULL;
+    }
+
+    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
+                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
+    if (!ret) {
+        device_priv->surf_destroy(surf);
+        return NULL;
+    }
+
+    return ret;
+}
+
+static int vdpau_frames_init(AVHWFramesContext *ctx)
+{
+    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
+    VDPAUFramesContext        *priv = ctx->internal->priv;
+
+    int i;
+
+    switch (ctx->sw_format) {
+    case AV_PIX_FMT_YUV420P: priv->chroma_type = VDP_CHROMA_TYPE_420; break;
+    case AV_PIX_FMT_YUV422P: priv->chroma_type = VDP_CHROMA_TYPE_422; break;
+    case AV_PIX_FMT_YUV444P: priv->chroma_type = VDP_CHROMA_TYPE_444; break;
+    default:
+        av_log(ctx, AV_LOG_ERROR, "Unsupported data layout: %s\n",
+               av_get_pix_fmt_name(ctx->sw_format));
+        return AVERROR(ENOSYS);
+    }
+
+    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
+        if (vdpau_pix_fmts[i].chroma_type == priv->chroma_type) {
+            priv->chroma_idx  = i;
+            priv->pix_fmts    = device_priv->pix_fmts[i];
+            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
+            break;
+        }
+    }
+    if (!priv->pix_fmts) {
+        av_log(ctx, AV_LOG_ERROR, "Unsupported chroma type: %d\n", priv->chroma_type);
+        return AVERROR(ENOSYS);
+    }
+
+    if (!ctx->pool) {
+        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
+                                                            vdpau_pool_alloc, NULL);
+        if (!ctx->internal->pool_internal)
+            return AVERROR(ENOMEM);
+    }
+
+    priv->get_data = device_priv->get_data;
+    priv->put_data = device_priv->put_data;
+
+    return 0;
+}
+
+static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
+{
+    frame->buf[0] = av_buffer_pool_get(ctx->pool);
+    if (!frame->buf[0])
+        return AVERROR(ENOMEM);
+
+    frame->data[3] = frame->buf[0]->data;
+    frame->format  = AV_PIX_FMT_VDPAU;
+    frame->width   = ctx->width;
+    frame->height  = ctx->height;
+
+    return 0;
+}
+
+static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
+                                      enum AVHWFrameTransferDirection dir,
+                                      enum AVPixelFormat **formats)
+{
+    VDPAUFramesContext *priv  = ctx->internal->priv;
+
+    enum AVPixelFormat *fmts;
+
+    if (priv->nb_pix_fmts == 1) {
+        av_log(ctx, AV_LOG_ERROR,
+               "No target formats are supported for this chroma type\n");
+        return AVERROR(ENOSYS);
+    }
+
+    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
+    if (!fmts)
+        return AVERROR(ENOMEM);
+
+    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
+    *formats = fmts;
+
+    return 0;
+}
+
+static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
+                                    const AVFrame *src)
+{
+    VDPAUFramesContext *priv = ctx->internal->priv;
+    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)src->data[3];
+
+    void *data[3];
+    uint32_t linesize[3];
+
+    const VDPAUPixFmtMap *map;
+    VdpYCbCrFormat vdpau_format;
+    VdpStatus err;
+    int i;
+
+    for (i = 0; i < FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
+        data[i] = dst->data[i];
+        if (dst->linesize[i] < 0 || (uint64_t)dst->linesize[i] > UINT32_MAX) {
+            av_log(ctx, AV_LOG_ERROR,
+                   "The linesize %d cannot be represented as uint32\n",
+                   dst->linesize[i]);
+            return AVERROR(ERANGE);
+        }
+        linesize[i] = dst->linesize[i];
+    }
+
+    map = vdpau_pix_fmts[priv->chroma_idx].map;
+    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
+        if (map[i].pix_fmt == dst->format) {
+            vdpau_format = map[i].vdpau_fmt;
+            break;
+        }
+    }
+    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
+        av_log(ctx, AV_LOG_ERROR,
+               "Unsupported target pixel format: %s\n",
+               av_get_pix_fmt_name(dst->format));
+        return AVERROR(EINVAL);
+    }
+
+    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
+        FFSWAP(void*, data[1], data[2]);
+
+    err = priv->get_data(surf, vdpau_format, data, linesize);
+    if (err != VDP_STATUS_OK) {
+        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
+        return AVERROR_UNKNOWN;
+    }
+
+    return 0;
+}
+
+static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
+                                  const AVFrame *src)
+{
+    VDPAUFramesContext *priv = ctx->internal->priv;
+    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)dst->data[3];
+
+    const void *data[3];
+    uint32_t linesize[3];
+
+    const VDPAUPixFmtMap *map;
+    VdpYCbCrFormat vdpau_format;
+    VdpStatus err;
+    int i;
+
+    for (i = 0; i < FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
+        data[i] = src->data[i];
+        if (src->linesize[i] < 0 || (uint64_t)src->linesize[i] > UINT32_MAX) {
+            av_log(ctx, AV_LOG_ERROR,
+                   "The linesize %d cannot be represented as uint32\n",
+                   src->linesize[i]);
+            return AVERROR(ERANGE);
+        }
+        linesize[i] = src->linesize[i];
+    }
+
+    map = vdpau_pix_fmts[priv->chroma_idx].map;
+    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
+        if (map[i].pix_fmt == src->format) {
+            vdpau_format = map[i].vdpau_fmt;
+            break;
+        }
+    }
+    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
+        av_log(ctx, AV_LOG_ERROR,
+               "Unsupported source pixel format: %s\n",
+               av_get_pix_fmt_name(src->format));
+        return AVERROR(EINVAL);
+    }
+
+    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
+        FFSWAP(const void*, data[1], data[2]);
+
+    err = priv->put_data(surf, vdpau_format, data, linesize);
+    if (err != VDP_STATUS_OK) {
+        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
+        return AVERROR_UNKNOWN;
+    }
+
+    return 0;
+}
+
+const HWContextType ff_hwcontext_type_vdpau = {
+    .type                 = AV_HWDEVICE_TYPE_VDPAU,
+    .name                 = "VDPAU",
+
+    .device_hwctx_size    = sizeof(AVVDPAUDeviceContext),
+    .device_priv_size     = sizeof(VDPAUDeviceContext),
+    .frames_priv_size     = sizeof(VDPAUFramesContext),
+
+    .device_init          = vdpau_device_init,
+    .device_uninit        = vdpau_device_uninit,
+    .frames_init          = vdpau_frames_init,
+    .frames_get_buffer    = vdpau_get_buffer,
+    .transfer_get_formats = vdpau_transfer_get_formats,
+    .transfer_data_to     = vdpau_transfer_data_to,
+    .transfer_data_from   = vdpau_transfer_data_from,
+
+    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
+};
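
To show how the ff_hwcontext_type_vdpau callbacks above are reached through the public hwcontext.h entry points, here is a hedged sketch (not part of the commit): it creates a frames context on an existing VDPAU device reference, allocates one surface-backed frame and downloads it to system memory. The size and formats are arbitrary examples; the destination format must be one that vdpau_transfer_get_formats() reports for the chroma type on the hardware at hand.

    #include "libavutil/frame.h"
    #include "libavutil/hwcontext.h"

    /* device_ref is assumed to be an initialised AV_HWDEVICE_TYPE_VDPAU device
     * reference (see the device-creation sketch after the APIchanges hunk). */
    static int vdpau_download_sketch(AVBufferRef *device_ref)
    {
        AVBufferRef       *frames_ref = av_hwframe_ctx_alloc(device_ref);
        AVHWFramesContext *frames_ctx;
        AVFrame *hw = av_frame_alloc(), *sw = av_frame_alloc();
        int ret = AVERROR(ENOMEM);

        if (!frames_ref || !hw || !sw)
            goto fail;

        frames_ctx            = (AVHWFramesContext*)frames_ref->data;
        frames_ctx->format    = AV_PIX_FMT_VDPAU;
        frames_ctx->sw_format = AV_PIX_FMT_YUV420P; /* selects VDP_CHROMA_TYPE_420 */
        frames_ctx->width     = 1280;
        frames_ctx->height    = 720;

        ret = av_hwframe_ctx_init(frames_ref);          /* -> vdpau_frames_init() */
        if (ret < 0)
            goto fail;

        ret = av_hwframe_get_buffer(frames_ref, hw, 0); /* -> vdpau_get_buffer()  */
        if (ret < 0)
            goto fail;

        /* Allocate a matching system-memory frame and download into it. */
        sw->format = AV_PIX_FMT_NV12; /* assumed to be reported as supported */
        sw->width  = hw->width;
        sw->height = hw->height;
        ret = av_frame_get_buffer(sw, 32);
        if (ret < 0)
            goto fail;

        ret = av_hwframe_transfer_data(sw, hw, 0);  /* -> vdpau_transfer_data_from() */

    fail:
        av_frame_free(&hw);
        av_frame_free(&sw);
        av_buffer_unref(&frames_ref);
        return ret;
    }
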
new file mode 100644
... ...
@@ -0,0 +1,44 @@
+/*
+ * This file is part of Libav.
+ *
+ * Libav is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * Libav is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with Libav; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef AVUTIL_HWCONTEXT_VDPAU_H
+#define AVUTIL_HWCONTEXT_VDPAU_H
+
+#include <vdpau/vdpau.h>
+
+/**
+ * @file
+ * An API-specific header for AV_HWDEVICE_TYPE_VDPAU.
+ *
+ * This API supports dynamic frame pools. AVHWFramesContext.pool must return
+ * AVBufferRefs whose data pointer is a VdpVideoSurface.
+ */
+
+/**
+ * This struct is allocated as AVHWDeviceContext.hwctx
+ */
+typedef struct AVVDPAUDeviceContext {
+    VdpDevice          device;
+    VdpGetProcAddress *get_proc_address;
+} AVVDPAUDeviceContext;
+
+/**
+ * AVHWFramesContext.hwctx is currently not used
+ */
+
+#endif /* AVUTIL_HWCONTEXT_VDPAU_H */
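
The header comment above states the contract for caller-provided pools: every buffer returned by AVHWFramesContext.pool must carry a VdpVideoSurface in its data pointer. The following hedged sketch (not part of the commit) installs such a pool with av_buffer_pool_init2(). my_surface_create() and my_surface_destroy() are hypothetical application helpers standing in for whatever surface management the caller already has; they are not part of libavutil or libvdpau.

    #include <stdint.h>
    #include <vdpau/vdpau.h>

    #include "libavutil/buffer.h"
    #include "libavutil/error.h"
    #include "libavutil/hwcontext.h"

    /* Hypothetical application helpers, declared here only for illustration. */
    VdpStatus my_surface_create(VdpVideoSurface *surf);
    void      my_surface_destroy(VdpVideoSurface surf);

    static void pool_free_surface(void *opaque, uint8_t *data)
    {
        my_surface_destroy((VdpVideoSurface)(uintptr_t)data);
    }

    static AVBufferRef *pool_alloc_surface(void *opaque, int size)
    {
        VdpVideoSurface surf;

        if (my_surface_create(&surf) != VDP_STATUS_OK)
            return NULL;
        /* The surface handle itself travels in the buffer's data pointer,
         * matching what the default pool in hwcontext_vdpau.c does. */
        return av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                                pool_free_surface, NULL, AV_BUFFER_FLAG_READONLY);
    }

    /* Install the custom pool on the frames context before av_hwframe_ctx_init(). */
    static int set_custom_vdpau_pool(AVHWFramesContext *frames_ctx)
    {
        frames_ctx->pool = av_buffer_pool_init2(sizeof(VdpVideoSurface), NULL,
                                                pool_alloc_surface, NULL);
        return frames_ctx->pool ? 0 : AVERROR(ENOMEM);
    }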