libavcodec: v4l2: add support for v4l2 mem2mem codecs

This patchset enhances Alexis Ballier's original patch and validates
it using Qualcomm's Venus hardware (driver recently landed upstream
[1]).

This has been tested on Qualcomm's DragonBoard 410c and 820c.
Configure/make scripts have been validated on Ubuntu 10.04 and 16.04.

Tested decoders:
- h264
- h263
- mpeg4
- vp8
- vp9
- hevc

Tested encoders:
- h264
- h263
- mpeg4

Tested transcoding (concurrent encoding/decoding)

Some of the changes introduced:
- v4l2: code cleanup and abstractions added
- v4l2: follow the new encode/decode API (see the usage sketch after this list)
- v4l2: fix display size for NV12 output pool
- v4l2: handle EOS (EPIPE and draining)
- v4l2: vp8 and mpeg4 decoding and encoding
- v4l2: hevc and vp9 support
- v4l2: generate EOF on dequeue errors
- v4l2: h264_mp4toannexb filtering
- v4l2: fix make install and fate issues
- v4l2: codecs enabled/disabled depending on the pixfmts defined
- v4l2: pass timebase/framerate to the context
- v4l2: runtime decoder reconfiguration
- v4l2: add more frame information
- v4l2: free hardware resources when the last reference is released
- v4l2: encoding: disable b-frames for upstreaming (patch required)
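
As a usage sketch for the new encode/decode API mentioned above, decoding
through one of the new wrappers looks roughly like the snippet below. This is
illustrative only: the helper name, the missing demuxing and the simplified
error handling are assumptions, not part of this patchset; "h264_v4l2m2m" is
the decoder name registered here and is only present when enabled at
configure time.

    #include <libavcodec/avcodec.h>

    /* Rough sketch: decode H.264 via the V4L2 mem2mem wrapper with the
     * send/receive API. Frame consumption is omitted. */
    static int decode_with_v4l2m2m(const AVPacket *pkt, AVFrame *frame)
    {
        AVCodec *codec;
        AVCodecContext *avctx;
        int ret;

        avcodec_register_all(); /* still required in this FFmpeg version */

        codec = avcodec_find_decoder_by_name("h264_v4l2m2m");
        if (!codec)
            return AVERROR_DECODER_NOT_FOUND;

        avctx = avcodec_alloc_context3(codec);
        if (!avctx)
            return AVERROR(ENOMEM);

        ret = avcodec_open2(avctx, codec, NULL);
        if (ret < 0)
            goto done;

        /* feed coded data to the driver's OUTPUT queue ... */
        ret = avcodec_send_packet(avctx, pkt);

        /* ... and drain decoded pictures from the CAPTURE queue */
        while (ret >= 0) {
            ret = avcodec_receive_frame(avctx, frame);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                ret = 0;
                break;
            }
            /* frame->data[] is backed by an mmapped V4L2 capture buffer;
             * a real caller would consume or export the frame here */
        }

    done:
        avcodec_free_context(&avctx);
        return ret;
    }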

[1] https://lwn.net/Articles/697956/

System-level view:
v4l2_m2m_enc/dec --> v4l2_m2m --> v4l2_context --> v4l2_buffers
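
The layering can be sketched as follows; this is an illustrative, trimmed-down
view inferred from the new v4l2_* files, not the literal struct definitions:

    /* Simplified sketch of the layering above; field lists are trimmed. */
    typedef struct V4L2Context {
        struct V4L2Buffer *buffers;   /* pool of mmapped V4L2 buffers */
        int num_buffers;
    } V4L2Context;

    typedef struct V4L2m2mContext {
        struct AVCodecContext *avctx; /* owning libavcodec context */
        int fd;                       /* /dev/video* mem2mem device node */
        V4L2Context output;           /* OUTPUT queue: data fed to the driver */
        V4L2Context capture;          /* CAPTURE queue: data produced by the driver */
    } V4L2m2mContext;

    /* Each V4L2Buffer keeps a back-pointer to its V4L2Context, so the buffer
     * helpers can recover the V4L2m2mContext via container_of(). */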

Reviewed-by: Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
Reviewed-by: Alexis Ballier <aballier@gentoo.org>
Tested-by: Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>

Signed-off-by: wm4 <nfxjfg@googlemail.com>

Jorge Ramirez-Ortiz authored on 2017/09/21 10:55:40
14 changed files:
@@ -49,6 +49,7 @@ version <next>:
 - VP9 tile threading support
 - KMS screen grabber
 - CUDA thumbnail filter
+- V4L2 mem2mem HW assisted codecs
 
 
 version 3.3:
@@ -185,6 +185,7 @@ Individual component options:
  --enable-filter=NAME     enable filter NAME
  --disable-filter=NAME    disable filter NAME
  --disable-filters        disable all filters
+  --disable-v4l2_m2m       disable V4L2 mem2mem code [autodetect]
 
 External library support:
 
@@ -1628,6 +1629,7 @@ HWACCEL_AUTODETECT_LIBRARY_LIST="
    vda
    vdpau
    videotoolbox
+    v4l2_m2m
    xvmc
 "
 
@@ -2755,6 +2757,7 @@ omx_rpi_select="omx"
 qsvdec_select="qsv"
 qsvenc_select="qsv"
 vaapi_encode_deps="vaapi"
+v4l2_m2m_deps_any="linux_videodev2_h"
 
 hwupload_cuda_filter_deps="cuda"
 scale_npp_filter_deps="cuda libnpp"
@@ -2765,6 +2768,8 @@ nvenc_deps="cuda"
 nvenc_deps_any="dlopen LoadLibrary"
 nvenc_encoder_deps="nvenc"
 
+h263_v4l2m2m_decoder_deps="v4l2_m2m h263_v4l2_m2m"
+h263_v4l2m2m_encoder_deps="v4l2_m2m h263_v4l2_m2m"
 h264_crystalhd_decoder_select="crystalhd h264_mp4toannexb_bsf h264_parser"
 h264_cuvid_decoder_deps="cuda cuvid"
 h264_cuvid_decoder_select="h264_mp4toannexb_bsf"
@@ -2783,6 +2788,8 @@ h264_vda_decoder_deps="vda"
 h264_vda_decoder_select="h264_decoder"
 h264_vdpau_decoder_deps="vdpau"
 h264_vdpau_decoder_select="h264_decoder"
+h264_v4l2m2m_decoder_deps="v4l2_m2m h264_v4l2_m2m"
+h264_v4l2m2m_encoder_deps="v4l2_m2m h264_v4l2_m2m"
 hevc_cuvid_decoder_deps="cuda cuvid"
 hevc_cuvid_decoder_select="hevc_mp4toannexb_bsf"
 hevc_mediacodec_decoder_deps="mediacodec"
@@ -2794,12 +2801,15 @@ hevc_qsv_encoder_deps="libmfx"
 hevc_qsv_encoder_select="hevcparse qsvenc"
 hevc_vaapi_encoder_deps="VAEncPictureParameterBufferHEVC"
 hevc_vaapi_encoder_select="vaapi_encode golomb"
+hevc_v4l2m2m_decoder_deps="v4l2_m2m hevc_v4l2_m2m"
+hevc_v4l2m2m_encoder_deps="v4l2_m2m hevc_v4l2_m2m"
 mjpeg_cuvid_decoder_deps="cuda cuvid"
 mjpeg_vaapi_encoder_deps="VAEncPictureParameterBufferJPEG"
 mjpeg_vaapi_encoder_select="vaapi_encode jpegtables"
 mpeg1_cuvid_decoder_deps="cuda cuvid"
 mpeg1_vdpau_decoder_deps="vdpau"
 mpeg1_vdpau_decoder_select="mpeg1video_decoder"
+mpeg1_v4l2m2m_decoder_deps="v4l2_m2m mpeg1_v4l2_m2m"
 mpeg2_crystalhd_decoder_select="crystalhd"
 mpeg2_cuvid_decoder_deps="cuda cuvid"
 mpeg2_mmal_decoder_deps="mmal"
@@ -2810,6 +2820,7 @@ mpeg2_qsv_encoder_deps="libmfx"
 mpeg2_qsv_encoder_select="qsvenc"
 mpeg2_vaapi_encoder_deps="VAEncPictureParameterBufferMPEG2"
 mpeg2_vaapi_encoder_select="vaapi_encode"
+mpeg2_v4l2m2m_decoder_deps="v4l2_m2m mpeg2_v4l2_m2m"
 mpeg4_crystalhd_decoder_select="crystalhd"
 mpeg4_cuvid_decoder_deps="cuda cuvid"
 mpeg4_mediacodec_decoder_deps="mediacodec"
@@ -2817,6 +2828,8 @@ mpeg4_mmal_decoder_deps="mmal"
 mpeg4_omx_encoder_deps="omx"
 mpeg4_vdpau_decoder_deps="vdpau"
 mpeg4_vdpau_decoder_select="mpeg4_decoder"
+mpeg4_v4l2m2m_decoder_deps="v4l2_m2m mpeg4_v4l2_m2m"
+mpeg4_v4l2m2m_encoder_deps="v4l2_m2m mpeg4_v4l2_m2m"
 mpeg_vdpau_decoder_deps="vdpau"
 mpeg_vdpau_decoder_select="mpeg2video_decoder"
 msmpeg4_crystalhd_decoder_select="crystalhd"
@@ -2827,16 +2840,20 @@ vc1_cuvid_decoder_deps="cuda cuvid"
 vc1_mmal_decoder_deps="mmal"
 vc1_vdpau_decoder_deps="vdpau"
 vc1_vdpau_decoder_select="vc1_decoder"
+vc1_v4l2m2m_decoder_deps="v4l2_m2m vc1_v4l2_m2m"
 vp8_cuvid_decoder_deps="cuda cuvid"
 vp8_mediacodec_decoder_deps="mediacodec"
 vp8_qsv_decoder_deps="libmfx"
 vp8_qsv_decoder_select="qsvdec vp8_qsv_hwaccel vp8_parser"
 vp8_vaapi_encoder_deps="VAEncPictureParameterBufferVP8"
 vp8_vaapi_encoder_select="vaapi_encode"
+vp8_v4l2m2m_decoder_deps="v4l2_m2m vp8_v4l2_m2m"
+vp8_v4l2m2m_encoder_deps="v4l2_m2m vp8_v4l2_m2m"
 vp9_cuvid_decoder_deps="cuda cuvid"
 vp9_mediacodec_decoder_deps="mediacodec"
 vp9_vaapi_encoder_deps="VAEncPictureParameterBufferVP9"
 vp9_vaapi_encoder_select="vaapi_encode"
+vp9_v4l2m2m_decoder_deps="v4l2_m2m vp9_v4l2_m2m"
 wmv3_crystalhd_decoder_select="crystalhd"
 wmv3_vdpau_decoder_select="vc1_vdpau_decoder"
 
@@ -6109,9 +6126,20 @@ perl -v            > /dev/null 2>&1 && enable perl      || disable perl
 pod2man --help     > /dev/null 2>&1 && enable pod2man   || disable pod2man
 rsync --help 2> /dev/null | grep -q 'contimeout' && enable rsync_contimeout || disable rsync_contimeout
 
+# check V4L2 codecs available in the API
 check_header linux/fb.h
 check_header linux/videodev2.h
 check_code cc linux/videodev2.h "struct v4l2_frmsizeenum vfse; vfse.discrete.width = 0;" && enable_safe struct_v4l2_frmivalenum_discrete
+check_code cc linux/videodev2.h "int i = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_VIDEO_M2M | V4L2_BUF_FLAG_LAST;" || disable v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_VC1_ANNEX_G;" && enable vc1_v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_MPEG1;" && enable mpeg1_v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_MPEG2;" && enable mpeg2_v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_MPEG4;" && enable mpeg4_v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_HEVC;" && enable hevc_v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_H263;" && enable h263_v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_H264;" && enable h264_v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_VP8;" && enable vp8_v4l2_m2m
+check_code cc linux/videodev2.h "int i = V4L2_PIX_FMT_VP9;" && enable vp9_v4l2_m2m
 
 check_header sys/videoio.h
 check_code cc sys/videoio.h "struct v4l2_frmsizeenum vfse; vfse.discrete.width = 0;" && enable_safe struct_v4l2_frmivalenum_discrete
@@ -137,6 +137,7 @@ OBJS-$(CONFIG_VIDEODSP)                += videodsp.o
 OBJS-$(CONFIG_VP3DSP)                  += vp3dsp.o
 OBJS-$(CONFIG_VP56DSP)                 += vp56dsp.o
 OBJS-$(CONFIG_VP8DSP)                  += vp8dsp.o
+OBJS-$(CONFIG_V4L2_M2M)                += v4l2_m2m.o v4l2_context.o v4l2_buffers.o v4l2_fmt.o
 OBJS-$(CONFIG_WMA_FREQS)               += wma_freqs.o
 OBJS-$(CONFIG_WMV2DSP)                 += wmv2dsp.o
 
@@ -323,6 +324,8 @@ OBJS-$(CONFIG_H263_DECODER)            += h263dec.o h263.o ituh263dec.o        \
                                          intelh263dec.o h263data.o
 OBJS-$(CONFIG_H263_ENCODER)            += mpeg4videoenc.o mpeg4video.o  \
                                          h263.o ituh263enc.o flvenc.o h263data.o
+OBJS-$(CONFIG_H263_V4L2M2M_DECODER)    += v4l2_m2m_dec.o
+OBJS-$(CONFIG_H263_V4L2M2M_ENCODER)    += v4l2_m2m_enc.o
 OBJS-$(CONFIG_H264_DECODER)            += h264dec.o h264_cabac.o h264_cavlc.o \
                                          h264_direct.o h264_loopfilter.o  \
                                          h264_mb.o h264_picture.o \
@@ -340,6 +343,8 @@ OBJS-$(CONFIG_H264_QSV_DECODER)        += qsvdec_h2645.o
 OBJS-$(CONFIG_H264_QSV_ENCODER)        += qsvenc_h264.o
 OBJS-$(CONFIG_H264_VAAPI_ENCODER)      += vaapi_encode_h264.o vaapi_encode_h26x.o
 OBJS-$(CONFIG_H264_VIDEOTOOLBOX_ENCODER) += videotoolboxenc.o
+OBJS-$(CONFIG_H264_V4L2M2M_DECODER)    += v4l2_m2m_dec.o
+OBJS-$(CONFIG_H264_V4L2M2M_ENCODER)    += v4l2_m2m_enc.o
 OBJS-$(CONFIG_HAP_DECODER)             += hapdec.o hap.o
 OBJS-$(CONFIG_HAP_ENCODER)             += hapenc.o hap.o
 OBJS-$(CONFIG_HEVC_DECODER)            += hevcdec.o hevc_mvs.o \
@@ -353,6 +358,8 @@ OBJS-$(CONFIG_HEVC_QSV_DECODER)        += qsvdec_h2645.o
 OBJS-$(CONFIG_HEVC_QSV_ENCODER)        += qsvenc_hevc.o hevc_ps_enc.o       \
                                          hevc_data.o
 OBJS-$(CONFIG_HEVC_VAAPI_ENCODER)      += vaapi_encode_h265.o vaapi_encode_h26x.o
+OBJS-$(CONFIG_HEVC_V4L2M2M_DECODER)    += v4l2_m2m_dec.o
+OBJS-$(CONFIG_HEVC_V4L2M2M_ENCODER)    += v4l2_m2m_enc.o
 OBJS-$(CONFIG_HNM4_VIDEO_DECODER)      += hnm4video.o
 OBJS-$(CONFIG_HQ_HQA_DECODER)          += hq_hqa.o hq_hqadata.o hq_hqadsp.o \
                                          canopus.o
@@ -422,6 +429,7 @@ OBJS-$(CONFIG_MPC8_DECODER)            += mpc8.o mpc.o
 OBJS-$(CONFIG_MPEGVIDEO_DECODER)       += mpeg12dec.o mpeg12.o mpeg12data.o
 OBJS-$(CONFIG_MPEG1VIDEO_DECODER)      += mpeg12dec.o mpeg12.o mpeg12data.o
 OBJS-$(CONFIG_MPEG1VIDEO_ENCODER)      += mpeg12enc.o mpeg12.o
+OBJS-$(CONFIG_MPEG1_V4L2M2M_DECODER)   += v4l2_m2m_dec.o
 OBJS-$(CONFIG_MPEG2_MMAL_DECODER)      += mmaldec.o
 OBJS-$(CONFIG_MPEG2_QSV_DECODER)       += qsvdec_other.o
 OBJS-$(CONFIG_MPEG2_QSV_ENCODER)       += qsvenc_mpeg2.o
@@ -429,9 +437,12 @@ OBJS-$(CONFIG_MPEG2VIDEO_DECODER)      += mpeg12dec.o mpeg12.o mpeg12data.o
 OBJS-$(CONFIG_MPEG2VIDEO_ENCODER)      += mpeg12enc.o mpeg12.o
 OBJS-$(CONFIG_MPEG2_MEDIACODEC_DECODER) += mediacodecdec.o
 OBJS-$(CONFIG_MPEG2_VAAPI_ENCODER)     += vaapi_encode_mpeg2.o
+OBJS-$(CONFIG_MPEG2_V4L2M2M_DECODER)   += v4l2_m2m_dec.o
 OBJS-$(CONFIG_MPEG4_DECODER)           += xvididct.o
 OBJS-$(CONFIG_MPEG4_MEDIACODEC_DECODER) += mediacodecdec.o
 OBJS-$(CONFIG_MPEG4_OMX_ENCODER)       += omx.o
+OBJS-$(CONFIG_MPEG4_V4L2M2M_DECODER)   += v4l2_m2m_dec.o
+OBJS-$(CONFIG_MPEG4_V4L2M2M_ENCODER)   += v4l2_m2m_enc.o
 OBJS-$(CONFIG_MPL2_DECODER)            += mpl2dec.o ass.o
 OBJS-$(CONFIG_MSA1_DECODER)            += mss3.o
 OBJS-$(CONFIG_MSCC_DECODER)            += mscc.o
@@ -605,6 +616,7 @@ OBJS-$(CONFIG_VC1_DECODER)             += vc1dec.o vc1_block.o vc1_loopfilter.o
 OBJS-$(CONFIG_VC1_CUVID_DECODER)       += cuvid.o
 OBJS-$(CONFIG_VC1_MMAL_DECODER)        += mmaldec.o
 OBJS-$(CONFIG_VC1_QSV_DECODER)         += qsvdec_other.o
+OBJS-$(CONFIG_VC1_V4L2M2M_DECODER)     += v4l2_m2m_dec.o
 OBJS-$(CONFIG_VC2_ENCODER)             += vc2enc.o vc2enc_dwt.o diractab.o
 OBJS-$(CONFIG_VCR1_DECODER)            += vcr1.o
 OBJS-$(CONFIG_VMDAUDIO_DECODER)        += vmdaudio.o
@@ -624,6 +636,8 @@ OBJS-$(CONFIG_VP8_CUVID_DECODER)       += cuvid.o
 OBJS-$(CONFIG_VP8_MEDIACODEC_DECODER)  += mediacodecdec.o
 OBJS-$(CONFIG_VP8_QSV_DECODER)         += qsvdec_other.o
 OBJS-$(CONFIG_VP8_VAAPI_ENCODER)       += vaapi_encode_vp8.o
+OBJS-$(CONFIG_VP8_V4L2M2M_DECODER)     += v4l2_m2m_dec.o
+OBJS-$(CONFIG_VP8_V4L2M2M_ENCODER)     += v4l2_m2m_enc.o
 OBJS-$(CONFIG_VP9_DECODER)             += vp9.o vp9data.o vp9dsp.o vp9lpf.o vp9recon.o \
                                          vp9block.o vp9prob.o vp9mvs.o vp56rac.o \
                                          vp9dsp_8bpp.o vp9dsp_10bpp.o vp9dsp_12bpp.o
@@ -631,6 +645,7 @@ OBJS-$(CONFIG_VP9_CUVID_DECODER)       += cuvid.o
 OBJS-$(CONFIG_VP9_MEDIACODEC_DECODER)  += mediacodecdec.o
 OBJS-$(CONFIG_VP9_VAAPI_ENCODER)       += vaapi_encode_vp9.o
 OBJS-$(CONFIG_VPLAYER_DECODER)         += textdec.o ass.o
+OBJS-$(CONFIG_VP9_V4L2M2M_DECODER)     += v4l2_m2m_dec.o
 OBJS-$(CONFIG_VQA_DECODER)             += vqavideo.o
 OBJS-$(CONFIG_WAVPACK_DECODER)         += wavpack.o
 OBJS-$(CONFIG_WAVPACK_ENCODER)         += wavpackenc.o
@@ -208,8 +208,10 @@ static void register_all(void)
     REGISTER_ENCDEC (H263,              h263);
     REGISTER_DECODER(H263I,             h263i);
     REGISTER_ENCDEC (H263P,             h263p);
+    REGISTER_ENCDEC (H263_V4L2M2M,      h263_v4l2m2m);
     REGISTER_DECODER(H264,              h264);
     REGISTER_DECODER(H264_CRYSTALHD,    h264_crystalhd);
+    REGISTER_ENCDEC (H264_V4L2M2M,      h264_v4l2m2m);
     REGISTER_DECODER(H264_MEDIACODEC,   h264_mediacodec);
     REGISTER_DECODER(H264_MMAL,         h264_mmal);
     REGISTER_DECODER(H264_QSV,          h264_qsv);
@@ -220,6 +222,7 @@ static void register_all(void)
     REGISTER_ENCDEC (HAP,               hap);
     REGISTER_DECODER(HEVC,              hevc);
     REGISTER_DECODER(HEVC_QSV,          hevc_qsv);
+    REGISTER_ENCDEC (HEVC_V4L2M2M,      hevc_v4l2m2m);
     REGISTER_DECODER(HNM4_VIDEO,        hnm4_video);
     REGISTER_DECODER(HQ_HQA,            hq_hqa);
     REGISTER_DECODER(HQX,               hqx);
@@ -254,6 +257,7 @@ static void register_all(void)
     REGISTER_ENCDEC (MPEG2VIDEO,        mpeg2video);
     REGISTER_ENCDEC (MPEG4,             mpeg4);
     REGISTER_DECODER(MPEG4_CRYSTALHD,   mpeg4_crystalhd);
+    REGISTER_ENCDEC (MPEG4_V4L2M2M,     mpeg4_v4l2m2m);
     REGISTER_DECODER(MPEG4_MMAL,        mpeg4_mmal);
 #if FF_API_VDPAU
     REGISTER_DECODER(MPEG4_VDPAU,       mpeg4_vdpau);
@@ -263,8 +267,10 @@ static void register_all(void)
     REGISTER_DECODER(MPEG_VDPAU,        mpeg_vdpau);
     REGISTER_DECODER(MPEG1_VDPAU,       mpeg1_vdpau);
 #endif
+    REGISTER_DECODER(MPEG1_V4L2M2M,     mpeg1_v4l2m2m);
     REGISTER_DECODER(MPEG2_MMAL,        mpeg2_mmal);
     REGISTER_DECODER(MPEG2_CRYSTALHD,   mpeg2_crystalhd);
+    REGISTER_DECODER(MPEG2_V4L2M2M,     mpeg2_v4l2m2m);
     REGISTER_DECODER(MPEG2_QSV,         mpeg2_qsv);
     REGISTER_DECODER(MPEG2_MEDIACODEC,  mpeg2_mediacodec);
     REGISTER_DECODER(MSA1,              msa1);
@@ -362,6 +368,7 @@ static void register_all(void)
     REGISTER_DECODER(VC1IMAGE,          vc1image);
     REGISTER_DECODER(VC1_MMAL,          vc1_mmal);
     REGISTER_DECODER(VC1_QSV,           vc1_qsv);
+    REGISTER_DECODER(VC1_V4L2M2M,       vc1_v4l2m2m);
     REGISTER_ENCODER(VC2,               vc2);
     REGISTER_DECODER(VCR1,              vcr1);
     REGISTER_DECODER(VMDVIDEO,          vmdvideo);
@@ -373,7 +380,9 @@ static void register_all(void)
     REGISTER_DECODER(VP6F,              vp6f);
     REGISTER_DECODER(VP7,               vp7);
     REGISTER_DECODER(VP8,               vp8);
+    REGISTER_ENCDEC (VP8_V4L2M2M,       vp8_v4l2m2m);
     REGISTER_DECODER(VP9,               vp9);
+    REGISTER_DECODER(VP9_V4L2M2M,       vp9_v4l2m2m);
     REGISTER_DECODER(VQA,               vqa);
     REGISTER_DECODER(BITPACKED,         bitpacked);
     REGISTER_DECODER(WEBP,              webp);
new file mode 100644
... ...
@@ -0,0 +1,453 @@
0
+/*
1
+ * V4L2 buffer helper functions.
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#include <linux/videodev2.h>
24
+#include <sys/ioctl.h>
25
+#include <sys/mman.h>
26
+#include <unistd.h>
27
+#include <fcntl.h>
28
+#include <poll.h>
29
+#include "libavcodec/avcodec.h"
30
+#include "libavcodec/internal.h"
31
+#include "v4l2_context.h"
32
+#include "v4l2_buffers.h"
33
+#include "v4l2_m2m.h"
34
+
35
+#define USEC_PER_SEC 1000000
36
+
37
+static inline V4L2m2mContext *buf_to_m2mctx(V4L2Buffer *buf)
38
+{
39
+    return V4L2_TYPE_IS_OUTPUT(buf->context->type) ?
40
+        container_of(buf->context, V4L2m2mContext, output) :
41
+        container_of(buf->context, V4L2m2mContext, capture);
42
+}
43
+
44
+static inline AVCodecContext *logger(V4L2Buffer *buf)
45
+{
46
+    return buf_to_m2mctx(buf)->avctx;
47
+}
48
+
49
+static inline void v4l2_set_pts(V4L2Buffer *out, int64_t pts)
50
+{
51
+    V4L2m2mContext *s = buf_to_m2mctx(out);
52
+    AVRational v4l2_timebase = { 1, USEC_PER_SEC };
53
+    int64_t v4l2_pts;
54
+
55
+    if (pts == AV_NOPTS_VALUE)
56
+        pts = 0;
57
+
58
+    /* convert pts to v4l2 timebase */
59
+    v4l2_pts = av_rescale_q(pts, s->avctx->time_base, v4l2_timebase);
60
+    out->buf.timestamp.tv_usec = v4l2_pts % USEC_PER_SEC;
61
+    out->buf.timestamp.tv_sec = v4l2_pts / USEC_PER_SEC;
62
+}
63
+
64
+static inline uint64_t v4l2_get_pts(V4L2Buffer *avbuf)
65
+{
66
+    V4L2m2mContext *s = buf_to_m2mctx(avbuf);
67
+    AVRational v4l2_timebase = { 1, USEC_PER_SEC };
68
+    int64_t v4l2_pts;
69
+
70
+    /* convert pts back to encoder timebase */
71
+    v4l2_pts = avbuf->buf.timestamp.tv_sec * USEC_PER_SEC + avbuf->buf.timestamp.tv_usec;
72
+
73
+    return av_rescale_q(v4l2_pts, v4l2_timebase, s->avctx->time_base);
74
+}
75
+
76
+static enum AVColorPrimaries v4l2_get_color_primaries(V4L2Buffer *buf)
77
+{
78
+    enum v4l2_ycbcr_encoding ycbcr;
79
+    enum v4l2_colorspace cs;
80
+
81
+    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
82
+        buf->context->format.fmt.pix_mp.colorspace :
83
+        buf->context->format.fmt.pix.colorspace;
84
+
85
+    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
86
+        buf->context->format.fmt.pix_mp.ycbcr_enc:
87
+        buf->context->format.fmt.pix.ycbcr_enc;
88
+
89
+    switch(ycbcr) {
90
+    case V4L2_YCBCR_ENC_XV709:
91
+    case V4L2_YCBCR_ENC_709: return AVCOL_PRI_BT709;
92
+    case V4L2_YCBCR_ENC_XV601:
93
+    case V4L2_YCBCR_ENC_601:return AVCOL_PRI_BT470M;
94
+    default:
95
+        break;
96
+    }
97
+
98
+    switch(cs) {
99
+    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_PRI_BT470BG;
100
+    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_PRI_SMPTE170M;
101
+    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_PRI_SMPTE240M;
102
+    case V4L2_COLORSPACE_BT2020: return AVCOL_PRI_BT2020;
103
+    default:
104
+        break;
105
+    }
106
+
107
+    return AVCOL_PRI_UNSPECIFIED;
108
+}
109
+
110
+static enum AVColorRange v4l2_get_color_range(V4L2Buffer *buf)
111
+{
112
+    enum v4l2_quantization qt;
113
+
114
+    qt = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
115
+        buf->context->format.fmt.pix_mp.quantization :
116
+        buf->context->format.fmt.pix.quantization;
117
+
118
+    switch (qt) {
119
+    case V4L2_QUANTIZATION_LIM_RANGE: return AVCOL_RANGE_MPEG;
120
+    case V4L2_QUANTIZATION_FULL_RANGE: return AVCOL_RANGE_JPEG;
121
+    default:
122
+        break;
123
+    }
124
+
125
+     return AVCOL_RANGE_UNSPECIFIED;
126
+}
127
+
128
+static enum AVColorSpace v4l2_get_color_space(V4L2Buffer *buf)
129
+{
130
+    enum v4l2_ycbcr_encoding ycbcr;
131
+    enum v4l2_colorspace cs;
132
+
133
+    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
134
+        buf->context->format.fmt.pix_mp.colorspace :
135
+        buf->context->format.fmt.pix.colorspace;
136
+
137
+    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
138
+        buf->context->format.fmt.pix_mp.ycbcr_enc:
139
+        buf->context->format.fmt.pix.ycbcr_enc;
140
+
141
+    switch(cs) {
142
+    case V4L2_COLORSPACE_SRGB: return AVCOL_SPC_RGB;
143
+    case V4L2_COLORSPACE_REC709: return AVCOL_SPC_BT709;
144
+    case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_SPC_FCC;
145
+    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_SPC_BT470BG;
146
+    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_SPC_SMPTE170M;
147
+    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_SPC_SMPTE240M;
148
+    case V4L2_COLORSPACE_BT2020:
149
+        if (ycbcr == V4L2_YCBCR_ENC_BT2020_CONST_LUM)
150
+            return AVCOL_SPC_BT2020_CL;
151
+        else
152
+             return AVCOL_SPC_BT2020_NCL;
153
+    default:
154
+        break;
155
+    }
156
+
157
+    return AVCOL_SPC_UNSPECIFIED;
158
+}
159
+
160
+static enum AVColorTransferCharacteristic v4l2_get_color_trc(V4L2Buffer *buf)
161
+{
162
+    enum v4l2_ycbcr_encoding ycbcr;
163
+    enum v4l2_xfer_func xfer;
164
+    enum v4l2_colorspace cs;
165
+
166
+    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
167
+        buf->context->format.fmt.pix_mp.colorspace :
168
+        buf->context->format.fmt.pix.colorspace;
169
+
170
+    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
171
+        buf->context->format.fmt.pix_mp.ycbcr_enc:
172
+        buf->context->format.fmt.pix.ycbcr_enc;
173
+
174
+    xfer = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
175
+        buf->context->format.fmt.pix_mp.xfer_func:
176
+        buf->context->format.fmt.pix.xfer_func;
177
+
178
+    switch (xfer) {
179
+    case V4L2_XFER_FUNC_709: return AVCOL_TRC_BT709;
180
+    case V4L2_XFER_FUNC_SRGB: return AVCOL_TRC_IEC61966_2_1;
181
+    default:
182
+        break;
183
+    }
184
+
185
+    switch (cs) {
186
+    case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_TRC_GAMMA22;
187
+    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_TRC_GAMMA28;
188
+    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_TRC_SMPTE170M;
189
+    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_TRC_SMPTE240M;
190
+    default:
191
+        break;
192
+    }
193
+
194
+    switch (ycbcr) {
195
+    case V4L2_YCBCR_ENC_XV709:
196
+    case V4L2_YCBCR_ENC_XV601: return AVCOL_TRC_BT1361_ECG;
197
+    default:
198
+        break;
199
+    }
200
+
201
+    return AVCOL_TRC_UNSPECIFIED;
202
+}
203
+
204
+static void v4l2_free_buffer(void *opaque, uint8_t *unused)
205
+{
206
+    V4L2Buffer* avbuf = opaque;
207
+    V4L2m2mContext *s = buf_to_m2mctx(avbuf);
208
+
209
+    atomic_fetch_sub_explicit(&s->refcount, 1, memory_order_acq_rel);
210
+    if (s->reinit) {
211
+        if (!atomic_load(&s->refcount))
212
+            sem_post(&s->refsync);
213
+        return;
214
+    }
215
+
216
+    if (avbuf->context->streamon) {
217
+        ff_v4l2_buffer_enqueue(avbuf);
218
+        return;
219
+    }
220
+
221
+    if (!atomic_load(&s->refcount))
222
+        ff_v4l2_m2m_codec_end(s->avctx);
223
+}
224
+
225
+static int v4l2_buf_to_bufref(V4L2Buffer *in, int plane, AVBufferRef **buf)
226
+{
227
+    V4L2m2mContext *s = buf_to_m2mctx(in);
228
+
229
+    if (plane >= in->num_planes)
230
+        return AVERROR(EINVAL);
231
+
232
+    /* even though most encoders return 0 in data_offset encoding vp8 does require this value */
233
+    *buf = av_buffer_create((char *)in->plane_info[plane].mm_addr + in->planes[plane].data_offset,
234
+                            in->plane_info[plane].length, v4l2_free_buffer, in, 0);
235
+    if (!*buf)
236
+        return AVERROR(ENOMEM);
237
+
238
+    in->status = V4L2BUF_RET_USER;
239
+    atomic_fetch_add_explicit(&s->refcount, 1, memory_order_relaxed);
240
+
241
+    return 0;
242
+}
243
+
244
+static int v4l2_bufref_to_buf(V4L2Buffer *out, int plane, const uint8_t* data, int size, AVBufferRef* bref)
245
+{
246
+    if (plane >= out->num_planes)
247
+        return AVERROR(EINVAL);
248
+
249
+    memcpy(out->plane_info[plane].mm_addr, data, FFMIN(size, out->plane_info[plane].length));
250
+
251
+    out->planes[plane].bytesused = FFMIN(size, out->plane_info[plane].length);
252
+    out->planes[plane].length = out->plane_info[plane].length;
253
+
254
+    return 0;
255
+}
256
+
257
+/******************************************************************************
258
+ *
259
+ *              V4L2uffer interface
260
+ *
261
+ ******************************************************************************/
262
+
263
+int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer* out)
264
+{
265
+    int i, ret;
266
+
267
+    for(i = 0; i < out->num_planes; i++) {
268
+        ret = v4l2_bufref_to_buf(out, i, frame->buf[i]->data, frame->buf[i]->size, frame->buf[i]);
269
+        if (ret)
270
+            return ret;
271
+    }
272
+
273
+    v4l2_set_pts(out, frame->pts);
274
+
275
+    return 0;
276
+}
277
+
278
+int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *avbuf)
279
+{
280
+    V4L2m2mContext *s = buf_to_m2mctx(avbuf);
281
+    int i, ret;
282
+
283
+    av_frame_unref(frame);
284
+
285
+    /* 1. get references to the actual data */
286
+    for (i = 0; i < avbuf->num_planes; i++) {
287
+        ret = v4l2_buf_to_bufref(avbuf, i, &frame->buf[i]);
288
+        if (ret)
289
+            return ret;
290
+
291
+        frame->linesize[i] = avbuf->plane_info[i].bytesperline;
292
+        frame->data[i] = frame->buf[i]->data;
293
+    }
294
+
295
+    /* 1.1 fixup special cases */
296
+    switch (avbuf->context->av_pix_fmt) {
297
+    case AV_PIX_FMT_NV12:
298
+        if (avbuf->num_planes > 1)
299
+            break;
300
+        frame->linesize[1] = avbuf->plane_info[0].bytesperline;
301
+        frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
302
+        break;
303
+    default:
304
+        break;
305
+    }
306
+
307
+    /* 2. get frame information */
308
+    frame->key_frame = !!(avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME);
309
+    frame->format = avbuf->context->av_pix_fmt;
310
+    frame->color_primaries = v4l2_get_color_primaries(avbuf);
311
+    frame->colorspace = v4l2_get_color_space(avbuf);
312
+    frame->color_range = v4l2_get_color_range(avbuf);
313
+    frame->color_trc = v4l2_get_color_trc(avbuf);
314
+    frame->pts = v4l2_get_pts(avbuf);
315
+
316
+    /* these two values are updated also during re-init in v4l2_process_driver_event */
317
+    frame->height = s->output.height;
318
+    frame->width = s->output.width;
319
+
320
+    /* 3. report errors upstream */
321
+    if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
322
+        av_log(logger(avbuf), AV_LOG_ERROR, "%s: driver decode error\n", avbuf->context->name);
323
+        frame->decode_error_flags |= FF_DECODE_ERROR_INVALID_BITSTREAM;
324
+    }
325
+
326
+    return 0;
327
+}
328
+
329
+int ff_v4l2_buffer_buf_to_avpkt(AVPacket *pkt, V4L2Buffer *avbuf)
330
+{
331
+    int ret;
332
+
333
+    av_packet_unref(pkt);
334
+    ret = v4l2_buf_to_bufref(avbuf, 0, &pkt->buf);
335
+    if (ret)
336
+        return ret;
337
+
338
+    pkt->size = V4L2_TYPE_IS_MULTIPLANAR(avbuf->buf.type) ? avbuf->buf.m.planes[0].bytesused : avbuf->buf.bytesused;
339
+    pkt->data = pkt->buf->data;
340
+
341
+    if (avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME)
342
+        pkt->flags |= AV_PKT_FLAG_KEY;
343
+
344
+    if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
345
+        av_log(logger(avbuf), AV_LOG_ERROR, "%s driver encode error\n", avbuf->context->name);
346
+        pkt->flags |= AV_PKT_FLAG_CORRUPT;
347
+    }
348
+
349
+    pkt->dts = pkt->pts = v4l2_get_pts(avbuf);
350
+
351
+    return 0;
352
+}
353
+
354
+int ff_v4l2_buffer_avpkt_to_buf(const AVPacket *pkt, V4L2Buffer *out)
355
+{
356
+    int ret;
357
+
358
+    ret = v4l2_bufref_to_buf(out, 0, pkt->data, pkt->size, pkt->buf);
359
+    if (ret)
360
+        return ret;
361
+
362
+    v4l2_set_pts(out, pkt->pts);
363
+
364
+    if (pkt->flags & AV_PKT_FLAG_KEY)
365
+        out->flags = V4L2_BUF_FLAG_KEYFRAME;
366
+
367
+    return 0;
368
+}
369
+
370
+int ff_v4l2_buffer_initialize(V4L2Buffer* avbuf, int index)
371
+{
372
+    V4L2Context *ctx = avbuf->context;
373
+    int ret, i;
374
+
375
+    avbuf->buf.memory = V4L2_MEMORY_MMAP;
376
+    avbuf->buf.type = ctx->type;
377
+    avbuf->buf.index = index;
378
+
379
+    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
380
+        avbuf->buf.length = VIDEO_MAX_PLANES;
381
+        avbuf->buf.m.planes = avbuf->planes;
382
+    }
383
+
384
+    ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QUERYBUF, &avbuf->buf);
385
+    if (ret < 0)
386
+        return AVERROR(errno);
387
+
388
+    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
389
+        avbuf->num_planes = 0;
390
+        for (;;) {
391
+            /* in MP, the V4L2 API states that buf.length means num_planes */
392
+            if (avbuf->num_planes >= avbuf->buf.length)
393
+                break;
394
+            if (avbuf->buf.m.planes[avbuf->num_planes].length)
395
+                avbuf->num_planes++;
396
+        }
397
+    } else
398
+        avbuf->num_planes = 1;
399
+
400
+    for (i = 0; i < avbuf->num_planes; i++) {
401
+
402
+        avbuf->plane_info[i].bytesperline = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
403
+            ctx->format.fmt.pix_mp.plane_fmt[i].bytesperline :
404
+            ctx->format.fmt.pix.bytesperline;
405
+
406
+        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
407
+            avbuf->plane_info[i].length = avbuf->buf.m.planes[i].length;
408
+            avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.m.planes[i].length,
409
+                                           PROT_READ | PROT_WRITE, MAP_SHARED,
410
+                                           buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.planes[i].m.mem_offset);
411
+        } else {
412
+            avbuf->plane_info[i].length = avbuf->buf.length;
413
+            avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.length,
414
+                                          PROT_READ | PROT_WRITE, MAP_SHARED,
415
+                                          buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.offset);
416
+        }
417
+
418
+        if (avbuf->plane_info[i].mm_addr == MAP_FAILED)
419
+            return AVERROR(ENOMEM);
420
+    }
421
+
422
+    avbuf->status = V4L2BUF_AVAILABLE;
423
+
424
+    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
425
+        return 0;
426
+
427
+    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
428
+        avbuf->buf.m.planes = avbuf->planes;
429
+        avbuf->buf.length   = avbuf->num_planes;
430
+
431
+    } else {
432
+        avbuf->buf.bytesused = avbuf->planes[0].bytesused;
433
+        avbuf->buf.length    = avbuf->planes[0].length;
434
+    }
435
+
436
+    return ff_v4l2_buffer_enqueue(avbuf);
437
+}
438
+
439
+int ff_v4l2_buffer_enqueue(V4L2Buffer* avbuf)
440
+{
441
+    int ret;
442
+
443
+    avbuf->buf.flags = avbuf->flags;
444
+
445
+    ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QBUF, &avbuf->buf);
446
+    if (ret < 0)
447
+        return AVERROR(errno);
448
+
449
+    avbuf->status = V4L2BUF_IN_DRIVER;
450
+
451
+    return 0;
452
+}
0 453
new file mode 100644
@@ -0,0 +1,121 @@
+/*
+ * V4L2 buffer helper functions.
+ *
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef AVCODEC_V4L2_BUFFERS_H
+#define AVCODEC_V4L2_BUFFERS_H
+
+enum V4L2Buffer_status {
+    V4L2BUF_AVAILABLE,
+    V4L2BUF_IN_DRIVER,
+    V4L2BUF_RET_USER,
+};
+
+/**
+ * V4L2Buffer (wrapper for v4l2_buffer management)
+ */
+typedef struct V4L2Buffer {
+    /* each buffer needs to have a reference to its context */
+    struct V4L2Context *context;
+
+    /* keep track of the mmap address and mmap length */
+    struct V4L2Plane_info {
+        int bytesperline;
+        void * mm_addr;
+        size_t length;
+    } plane_info[VIDEO_MAX_PLANES];
+
+    int num_planes;
+
+    /* the v4l2_buffer buf.m.planes pointer uses the planes[] mem */
+    struct v4l2_buffer buf;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+
+    int flags;
+    enum V4L2Buffer_status status;
+
+} V4L2Buffer;
+
+/**
+ * Extracts the data from a V4L2Buffer to an AVFrame
+ *
+ * @param[in] frame The AVFrame to push the information to
+ * @param[in] buf The V4L2Buffer to get the information from
+ *
+ * @returns 0 in case of success, EINVAL if the number of planes is incorrect,
+ * ENOMEM if the AVBufferRef can't be created.
+ */
+int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *buf);
+
+/**
+ * Extracts the data from a V4L2Buffer to an AVPacket
+ *
+ * @param[in] pkt The AVPacket to push the information to
+ * @param[in] buf The V4L2Buffer to get the information from
+ *
+ * @returns 0 in case of success, EINVAL if the number of planes is incorrect,
+ * ENOMEM if the AVBufferRef can't be created.
+ *
+ */
+int ff_v4l2_buffer_buf_to_avpkt(AVPacket *pkt, V4L2Buffer *buf);
+
+/**
+ * Extracts the data from an AVPacket to a V4L2Buffer
+ *
+ * @param[in]  pkt   AVPacket to get the data from
+ * @param[in]  out   V4L2Buffer to push the information to
+ *
+ * @returns 0 in case of success, negative otherwise
+ */
+int ff_v4l2_buffer_avpkt_to_buf(const AVPacket *pkt, V4L2Buffer *out);
+
+/**
+ * Extracts the data from an AVFrame to a V4L2Buffer
+ *
+ * @param[in]  frame AVFrame to get the data from
+ * @param[in]  out   V4L2Buffer to push the information to
+ *
+ * @returns 0 in case of success, negative otherwise
+ */
+int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer* out);
+
+/**
+ * Initializes a V4L2Buffer
+ *
+ * @param[in]  avbuf V4L2Buffer to initialize
+ * @param[in]  index v4l2 buffer id
+ *
+ * @returns 0 in case of success, negative otherwise
+ */
+int ff_v4l2_buffer_initialize(V4L2Buffer* avbuf, int index);
+
+/**
+ * Enqueues a V4L2Buffer
+ *
+ * @param[in] avbuf V4L2Buffer to push to the driver
+ *
+ * @returns 0 in case of success, negative otherwise
+ */
+int ff_v4l2_buffer_enqueue(V4L2Buffer* avbuf);
+
+
+#endif // AVCODEC_V4L2_BUFFERS_H
new file mode 100644
... ...
@@ -0,0 +1,667 @@
0
+/*
1
+ * V4L2 context helper functions.
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#include <linux/videodev2.h>
24
+#include <sys/ioctl.h>
25
+#include <sys/mman.h>
26
+#include <unistd.h>
27
+#include <fcntl.h>
28
+#include <poll.h>
29
+#include "libavcodec/avcodec.h"
30
+#include "libavcodec/internal.h"
31
+#include "v4l2_buffers.h"
32
+#include "v4l2_fmt.h"
33
+#include "v4l2_m2m.h"
34
+
35
+struct v4l2_format_update {
36
+    uint32_t v4l2_fmt;
37
+    int update_v4l2;
38
+
39
+    enum AVPixelFormat av_fmt;
40
+    int update_avfmt;
41
+};
42
+
43
+static inline V4L2m2mContext *ctx_to_m2mctx(V4L2Context *ctx)
44
+{
45
+    return V4L2_TYPE_IS_OUTPUT(ctx->type) ?
46
+        container_of(ctx, V4L2m2mContext, output) :
47
+        container_of(ctx, V4L2m2mContext, capture);
48
+}
49
+
50
+static inline AVCodecContext *logger(V4L2Context *ctx)
51
+{
52
+    return ctx_to_m2mctx(ctx)->avctx;
53
+}
54
+
55
+static inline unsigned int v4l2_get_width(struct v4l2_format *fmt)
56
+{
57
+    return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.width : fmt->fmt.pix.width;
58
+}
59
+
60
+static inline unsigned int v4l2_get_height(struct v4l2_format *fmt)
61
+{
62
+    return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.height : fmt->fmt.pix.height;
63
+}
64
+
65
+static inline unsigned int v4l2_resolution_changed(V4L2Context *ctx, struct v4l2_format *fmt2)
66
+{
67
+    struct v4l2_format *fmt1 = &ctx->format;
68
+    int ret =  V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
69
+        fmt1->fmt.pix_mp.width != fmt2->fmt.pix_mp.width ||
70
+        fmt1->fmt.pix_mp.height != fmt2->fmt.pix_mp.height
71
+        :
72
+        fmt1->fmt.pix.width != fmt2->fmt.pix.width ||
73
+        fmt1->fmt.pix.height != fmt2->fmt.pix.height;
74
+
75
+    if (ret)
76
+        av_log(logger(ctx), AV_LOG_DEBUG, "%s changed (%dx%d) -> (%dx%d)\n",
77
+            ctx->name,
78
+            v4l2_get_width(fmt1), v4l2_get_height(fmt1),
79
+            v4l2_get_width(fmt2), v4l2_get_height(fmt2));
80
+
81
+    return ret;
82
+}
83
+
84
+static inline int v4l2_type_supported(V4L2Context *ctx)
85
+{
86
+    return ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ||
87
+        ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ||
88
+        ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
89
+        ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT;
90
+}
91
+
92
+static inline void v4l2_save_to_context(V4L2Context* ctx, struct v4l2_format_update *fmt)
93
+{
94
+    ctx->format.type = ctx->type;
95
+
96
+    if (fmt->update_avfmt)
97
+        ctx->av_pix_fmt = fmt->av_fmt;
98
+
99
+    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
100
+        /* update the sizes to handle the reconfiguration of the capture stream at runtime */
101
+        ctx->format.fmt.pix_mp.height = ctx->height;
102
+        ctx->format.fmt.pix_mp.width = ctx->width;
103
+        if (fmt->update_v4l2)
104
+            ctx->format.fmt.pix_mp.pixelformat = fmt->v4l2_fmt;
105
+    } else {
106
+        ctx->format.fmt.pix.height = ctx->height;
107
+        ctx->format.fmt.pix.width = ctx->width;
108
+        if (fmt->update_v4l2)
109
+            ctx->format.fmt.pix.pixelformat = fmt->v4l2_fmt;
110
+    }
111
+}
112
+
113
+/**
114
+ * returns 1 if reinit was succesful, negative if it failed
115
+ * returns 0 if reinit was not executed
116
+ */
117
+static int v4l2_handle_event(V4L2Context *ctx)
118
+{
119
+    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
120
+    struct v4l2_format cap_fmt = s->capture.format;
121
+    struct v4l2_format out_fmt = s->output.format;
122
+    struct v4l2_event evt = { 0 };
123
+    int full_reinit, reinit, ret;
124
+
125
+    ret = ioctl(s->fd, VIDIOC_DQEVENT, &evt);
126
+    if (ret < 0) {
127
+        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_DQEVENT\n", ctx->name);
128
+        return 0;
129
+    }
130
+
131
+    if (evt.type != V4L2_EVENT_SOURCE_CHANGE)
132
+        return 0;
133
+
134
+    ret = ioctl(s->fd, VIDIOC_G_FMT, &out_fmt);
135
+    if (ret) {
136
+        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->output.name);
137
+        return 0;
138
+    }
139
+
140
+    ret = ioctl(s->fd, VIDIOC_G_FMT, &cap_fmt);
141
+    if (ret) {
142
+        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->capture.name);
143
+        return 0;
144
+    }
145
+
146
+    full_reinit = v4l2_resolution_changed(&s->output, &out_fmt);
147
+    if (full_reinit) {
148
+        s->output.height = v4l2_get_height(&out_fmt);
149
+        s->output.width = v4l2_get_width(&out_fmt);
150
+    }
151
+
152
+    reinit = v4l2_resolution_changed(&s->capture, &cap_fmt);
153
+    if (reinit) {
154
+        s->capture.height = v4l2_get_height(&cap_fmt);
155
+        s->capture.width = v4l2_get_width(&cap_fmt);
156
+    }
157
+
158
+    if (full_reinit || reinit)
159
+        s->reinit = 1;
160
+
161
+    if (full_reinit) {
162
+        ret = ff_v4l2_m2m_codec_full_reinit(s);
163
+        if (ret) {
164
+            av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_full_reinit\n");
165
+            return -EINVAL;
166
+        }
167
+        goto reinit_run;
168
+    }
169
+
170
+    if (reinit) {
171
+        ret = ff_set_dimensions(s->avctx, s->capture.width, s->capture.height);
172
+        if (ret < 0)
173
+            av_log(logger(ctx), AV_LOG_WARNING, "update avcodec height and width\n");
174
+
175
+        ret = ff_v4l2_m2m_codec_reinit(s);
176
+        if (ret) {
177
+            av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_reinit\n");
178
+            return -EINVAL;
179
+        }
180
+        goto reinit_run;
181
+    }
182
+
183
+    /* dummy event received */
184
+    return 0;
185
+
186
+    /* reinit executed */
187
+reinit_run:
188
+    return 1;
189
+}
190
+
191
+static int v4l2_stop_decode(V4L2Context *ctx)
192
+{
193
+    struct v4l2_decoder_cmd cmd = {
194
+        .cmd = V4L2_DEC_CMD_STOP,
195
+    };
196
+    int ret;
197
+
198
+    ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DECODER_CMD, &cmd);
199
+    if (ret) {
200
+        /* DECODER_CMD is optional */
201
+        if (errno == ENOTTY)
202
+            return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF);
203
+    }
204
+
205
+    return 0;
206
+}
207
+
208
+static int v4l2_stop_encode(V4L2Context *ctx)
209
+{
210
+    struct v4l2_encoder_cmd cmd = {
211
+        .cmd = V4L2_ENC_CMD_STOP,
212
+    };
213
+    int ret;
214
+
215
+    ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENCODER_CMD, &cmd);
216
+    if (ret) {
217
+        /* ENCODER_CMD is optional */
218
+        if (errno == ENOTTY)
219
+            return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF);
220
+    }
221
+
222
+    return 0;
223
+}
224
+
225
+static V4L2Buffer* v4l2_dequeue_v4l2buf(V4L2Context *ctx, int timeout)
226
+{
227
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
228
+    struct v4l2_buffer buf = { 0 };
229
+    V4L2Buffer* avbuf = NULL;
230
+    struct pollfd pfd = {
231
+        .events =  POLLIN | POLLRDNORM | POLLPRI | POLLOUT | POLLWRNORM, /* default blocking capture */
232
+        .fd = ctx_to_m2mctx(ctx)->fd,
233
+    };
234
+    int ret;
235
+
236
+    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
237
+        pfd.events =  POLLOUT | POLLWRNORM;
238
+
239
+    for (;;) {
240
+        ret = poll(&pfd, 1, timeout);
241
+        if (ret > 0)
242
+            break;
243
+        if (errno == EINTR)
244
+            continue;
245
+
246
+        /* timeout is being used to indicate last valid bufer when draining */
247
+        if (ctx_to_m2mctx(ctx)->draining)
248
+            ctx->done = 1;
249
+
250
+        return NULL;
251
+    }
252
+
253
+    /* 0. handle errors */
254
+    if (pfd.revents & POLLERR) {
255
+        av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name);
256
+        return NULL;
257
+    }
258
+
259
+    /* 1. handle resolution changes */
260
+    if (pfd.revents & POLLPRI) {
261
+        ret = v4l2_handle_event(ctx);
262
+        if (ret < 0) {
263
+            /* if re-init failed, abort */
264
+            ctx->done = EINVAL;
265
+            return NULL;
266
+        }
267
+        if (ret) {
268
+            /* if re-init was successfull drop the buffer (if there was one)
269
+             * since we had to reconfigure capture (unmap all buffers)
270
+             */
271
+            return NULL;
272
+        }
273
+    }
274
+
275
+    /* 2. dequeue the buffer */
276
+    if (pfd.revents & (POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM)) {
277
+
278
+        if (!V4L2_TYPE_IS_OUTPUT(ctx->type)) {
279
+            /* there is a capture buffer ready */
280
+            if (pfd.revents & (POLLIN | POLLRDNORM))
281
+                goto dequeue;
282
+
283
+            /* the driver is ready to accept more input; instead of waiting for the capture
284
+             * buffer to complete we return NULL so input can proceed (we are single threaded)
285
+             */
286
+            if (pfd.revents & (POLLOUT | POLLWRNORM))
287
+                return NULL;
288
+        }
289
+
290
+dequeue:
291
+        memset(&buf, 0, sizeof(buf));
292
+        buf.memory = V4L2_MEMORY_MMAP;
293
+        buf.type = ctx->type;
294
+        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
295
+            memset(planes, 0, sizeof(planes));
296
+            buf.length = VIDEO_MAX_PLANES;
297
+            buf.m.planes = planes;
298
+        }
299
+
300
+        ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DQBUF, &buf);
301
+        if (ret) {
302
+            if (errno != EAGAIN) {
303
+                ctx->done = errno;
304
+                if (errno != EPIPE)
305
+                    av_log(logger(ctx), AV_LOG_DEBUG, "%s VIDIOC_DQBUF, errno (%s)\n",
306
+                        ctx->name, av_err2str(AVERROR(errno)));
307
+            }
308
+        } else {
309
+            avbuf = &ctx->buffers[buf.index];
310
+            avbuf->status = V4L2BUF_AVAILABLE;
311
+            avbuf->buf = buf;
312
+            if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
313
+                memcpy(avbuf->planes, planes, sizeof(planes));
314
+                avbuf->buf.m.planes = avbuf->planes;
315
+            }
316
+        }
317
+    }
318
+
319
+    return avbuf;
320
+}
321
+
322
+static V4L2Buffer* v4l2_getfree_v4l2buf(V4L2Context *ctx)
323
+{
324
+    int timeout = 0; /* return when no more buffers to dequeue */
325
+    int i;
326
+
327
+    /* get back as many output buffers as possible */
328
+    if (V4L2_TYPE_IS_OUTPUT(ctx->type)) {
329
+          do {
330
+          } while (v4l2_dequeue_v4l2buf(ctx, timeout));
331
+    }
332
+
333
+    for (i = 0; i < ctx->num_buffers; i++) {
334
+        if (ctx->buffers[i].status == V4L2BUF_AVAILABLE)
335
+            return &ctx->buffers[i];
336
+    }
337
+
338
+    return NULL;
339
+}
340
+
341
+static int v4l2_release_buffers(V4L2Context* ctx)
342
+{
343
+    struct v4l2_requestbuffers req = {
344
+        .memory = V4L2_MEMORY_MMAP,
345
+        .type = ctx->type,
346
+        .count = 0, /* 0 -> unmaps buffers from the driver */
347
+    };
348
+    int i, j;
349
+
350
+    for (i = 0; i < ctx->num_buffers; i++) {
351
+        V4L2Buffer *buffer = &ctx->buffers[i];
352
+
353
+        for (j = 0; j < buffer->num_planes; j++) {
354
+            struct V4L2Plane_info *p = &buffer->plane_info[j];
355
+            if (p->mm_addr && p->length)
356
+                if (munmap(p->mm_addr, p->length) < 0)
357
+                    av_log(logger(ctx), AV_LOG_ERROR, "%s unmap plane (%s))\n", ctx->name, av_err2str(AVERROR(errno)));
358
+        }
359
+    }
360
+
361
+    return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_REQBUFS, &req);
362
+}
363
+
364
+static inline int v4l2_try_raw_format(V4L2Context* ctx, enum AVPixelFormat pixfmt)
365
+{
366
+    struct v4l2_format *fmt = &ctx->format;
367
+    uint32_t v4l2_fmt;
368
+    int ret;
369
+
370
+    v4l2_fmt = ff_v4l2_format_avfmt_to_v4l2(pixfmt);
371
+    if (!v4l2_fmt)
372
+        return AVERROR(EINVAL);
373
+
374
+    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type))
375
+        fmt->fmt.pix_mp.pixelformat = v4l2_fmt;
376
+    else
377
+        fmt->fmt.pix.pixelformat = v4l2_fmt;
378
+
379
+    fmt->type = ctx->type;
380
+
381
+    ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, fmt);
382
+    if (ret)
383
+        return AVERROR(EINVAL);
384
+
385
+    return 0;
386
+}
387
+
388
+static int v4l2_get_raw_format(V4L2Context* ctx, enum AVPixelFormat *p)
389
+{
390
+    enum AVPixelFormat pixfmt = ctx->av_pix_fmt;
391
+    struct v4l2_fmtdesc fdesc;
392
+    int ret;
393
+
394
+    memset(&fdesc, 0, sizeof(fdesc));
395
+    fdesc.type = ctx->type;
396
+
397
+    if (pixfmt != AV_PIX_FMT_NONE) {
398
+        ret = v4l2_try_raw_format(ctx, pixfmt);
399
+        if (ret)
400
+            pixfmt = AV_PIX_FMT_NONE;
401
+        else
402
+            return 0;
403
+    }
404
+
405
+    for (;;) {
406
+        ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc);
407
+        if (ret)
408
+            return AVERROR(EINVAL);
409
+
410
+        pixfmt = ff_v4l2_format_v4l2_to_avfmt(fdesc.pixelformat, AV_CODEC_ID_RAWVIDEO);
411
+        ret = v4l2_try_raw_format(ctx, pixfmt);
412
+        if (ret){
413
+            fdesc.index++;
414
+            continue;
415
+        }
416
+
417
+        *p = pixfmt;
418
+
419
+        return 0;
420
+    }
421
+
422
+    return AVERROR(EINVAL);
423
+}
424
+
425
+static int v4l2_get_coded_format(V4L2Context* ctx, uint32_t *p)
426
+{
427
+    struct v4l2_fmtdesc fdesc;
428
+    uint32_t v4l2_fmt;
429
+    int ret;
430
+
431
+    /* translate to a valid v4l2 format */
432
+    v4l2_fmt = ff_v4l2_format_avcodec_to_v4l2(ctx->av_codec_id);
433
+    if (!v4l2_fmt)
434
+        return AVERROR(EINVAL);
435
+
436
+    /* check if the driver supports this format */
437
+    memset(&fdesc, 0, sizeof(fdesc));
438
+    fdesc.type = ctx->type;
439
+
440
+    for (;;) {
441
+        ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc);
442
+        if (ret)
443
+            return AVERROR(EINVAL);
444
+
445
+        if (fdesc.pixelformat == v4l2_fmt)
446
+            break;
447
+
448
+        fdesc.index++;
449
+    }
450
+
451
+    *p = v4l2_fmt;
452
+
453
+    return 0;
454
+}
455
+
456
+ /*****************************************************************************
457
+  *
458
+  *             V4L2 Context Interface
459
+  *
460
+  *****************************************************************************/
461
+
462
+int ff_v4l2_context_set_status(V4L2Context* ctx, int cmd)
463
+{
464
+    int type = ctx->type;
465
+    int ret;
466
+
467
+    ret = ioctl(ctx_to_m2mctx(ctx)->fd, cmd, &type);
468
+    if (ret < 0)
469
+        return AVERROR(errno);
470
+
471
+    ctx->streamon = (cmd == VIDIOC_STREAMON);
472
+
473
+    return 0;
474
+}
475
+
476
+int ff_v4l2_context_enqueue_frame(V4L2Context* ctx, const AVFrame* frame)
477
+{
478
+    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
479
+    V4L2Buffer* avbuf;
480
+    int ret;
481
+
482
+    if (!frame) {
483
+        ret = v4l2_stop_encode(ctx);
484
+        if (ret)
485
+            av_log(logger(ctx), AV_LOG_ERROR, "%s stop_encode\n", ctx->name);
486
+        s->draining= 1;
487
+        return 0;
488
+    }
489
+
490
+    avbuf = v4l2_getfree_v4l2buf(ctx);
491
+    if (!avbuf)
492
+        return AVERROR(ENOMEM);
493
+
494
+    ret = ff_v4l2_buffer_avframe_to_buf(frame, avbuf);
495
+    if (ret)
496
+        return ret;
497
+
498
+    return ff_v4l2_buffer_enqueue(avbuf);
499
+}
500
+
501
+int ff_v4l2_context_enqueue_packet(V4L2Context* ctx, const AVPacket* pkt)
502
+{
503
+    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
504
+    V4L2Buffer* avbuf;
505
+    int ret;
506
+
507
+    if (!pkt->size) {
508
+        ret = v4l2_stop_decode(ctx);
509
+        if (ret)
510
+            av_log(logger(ctx), AV_LOG_ERROR, "%s stop_decode\n", ctx->name);
511
+        s->draining = 1;
512
+        return 0;
513
+    }
514
+
515
+    avbuf = v4l2_getfree_v4l2buf(ctx);
516
+    if (!avbuf)
517
+        return AVERROR(ENOMEM);
518
+
519
+    ret = ff_v4l2_buffer_avpkt_to_buf(pkt, avbuf);
520
+    if (ret)
521
+        return ret;
522
+
523
+    return ff_v4l2_buffer_enqueue(avbuf);
524
+}
525
+
526
+int ff_v4l2_context_dequeue_frame(V4L2Context* ctx, AVFrame* frame)
527
+{
528
+    V4L2Buffer* avbuf = NULL;
529
+
530
+    /* if we are draining, we are no longer inputing data, therefore enable a
531
+     * timeout so we can dequeue and flag the last valid buffer.
532
+     *
533
+     * blocks until:
534
+     *  1. decoded frame available
535
+     *  2. an input buffer is ready to be dequeued
536
+     */
537
+    avbuf = v4l2_dequeue_v4l2buf(ctx, ctx_to_m2mctx(ctx)->draining ? 200 : -1);
538
+    if (!avbuf) {
539
+        if (ctx->done)
540
+            return AVERROR_EOF;
541
+
542
+        return AVERROR(EAGAIN);
543
+    }
544
+
545
+    return ff_v4l2_buffer_buf_to_avframe(frame, avbuf);
546
+}
547
+
548
+int ff_v4l2_context_dequeue_packet(V4L2Context* ctx, AVPacket* pkt)
549
+{
550
+    V4L2Buffer* avbuf = NULL;
551
+
552
+    /* if we are draining, we are no longer inputing data, therefore enable a
553
+     * timeout so we can dequeue and flag the last valid buffer.
554
+     *
555
+     * blocks until:
556
+     *  1. encoded packet available
557
+     *  2. an input buffer ready to be dequeued
558
+     */
559
+    avbuf = v4l2_dequeue_v4l2buf(ctx, ctx_to_m2mctx(ctx)->draining ? 200 : -1);
560
+    if (!avbuf) {
561
+        if (ctx->done)
562
+            return AVERROR_EOF;
563
+
564
+        return AVERROR(EAGAIN);
565
+    }
566
+
567
+    return ff_v4l2_buffer_buf_to_avpkt(pkt, avbuf);
568
+}
569
+
570
+int ff_v4l2_context_get_format(V4L2Context* ctx)
571
+{
572
+    struct v4l2_format_update fmt = { 0 };
573
+    int ret;
574
+
575
+    if  (ctx->av_codec_id == AV_CODEC_ID_RAWVIDEO) {
576
+        ret = v4l2_get_raw_format(ctx, &fmt.av_fmt);
577
+        if (ret)
578
+            return ret;
579
+
580
+        fmt.update_avfmt = 1;
581
+        v4l2_save_to_context(ctx, &fmt);
582
+
583
+        /* format has been tried already */
584
+        return ret;
585
+    }
586
+
587
+    ret = v4l2_get_coded_format(ctx, &fmt.v4l2_fmt);
588
+    if (ret)
589
+        return ret;
590
+
591
+    fmt.update_v4l2 = 1;
592
+    v4l2_save_to_context(ctx, &fmt);
593
+
594
+    return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, &ctx->format);
595
+}
596
+
597
+int ff_v4l2_context_set_format(V4L2Context* ctx)
598
+{
599
+    return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_S_FMT, &ctx->format);
600
+}
601
+
602
+void ff_v4l2_context_release(V4L2Context* ctx)
603
+{
604
+    int ret;
605
+
606
+    if (!ctx->buffers)
607
+        return;
608
+
609
+    ret = v4l2_release_buffers(ctx);
610
+    if (ret)
611
+        av_log(logger(ctx), AV_LOG_WARNING, "V4L2 failed to unmap the %s buffers\n", ctx->name);
612
+
613
+    av_free(ctx->buffers);
614
+    ctx->buffers = NULL;
615
+}
616
+
617
+int ff_v4l2_context_init(V4L2Context* ctx)
618
+{
619
+    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
620
+    struct v4l2_requestbuffers req;
621
+    int ret, i;
622
+
623
+    if (!v4l2_type_supported(ctx)) {
624
+        av_log(logger(ctx), AV_LOG_ERROR, "type %i not supported\n", ctx->type);
625
+        return AVERROR_PATCHWELCOME;
626
+    }
627
+
628
+    ret = ioctl(s->fd, VIDIOC_G_FMT, &ctx->format);
629
+    if (ret)
630
+        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT failed\n", ctx->name);
631
+
632
+    memset(&req, 0, sizeof(req));
633
+    req.count = ctx->num_buffers;
634
+    req.memory = V4L2_MEMORY_MMAP;
635
+    req.type = ctx->type;
636
+    ret = ioctl(s->fd, VIDIOC_REQBUFS, &req);
637
+    if (ret < 0)
638
+        return AVERROR(errno);
639
+
640
+    ctx->num_buffers = req.count;
641
+    ctx->buffers = av_mallocz(ctx->num_buffers * sizeof(V4L2Buffer));
642
+    if (!ctx->buffers) {
643
+        av_log(logger(ctx), AV_LOG_ERROR, "%s malloc enomem\n", ctx->name);
644
+        return AVERROR(ENOMEM);
645
+    }
646
+
647
+    for (i = 0; i < req.count; i++) {
648
+        ctx->buffers[i].context = ctx;
649
+        ret = ff_v4l2_buffer_initialize(&ctx->buffers[i], i);
650
+        if (ret < 0) {
651
+            av_log(logger(ctx), AV_LOG_ERROR, "%s buffer initialization (%s)\n", ctx->name, av_err2str(ret));
652
+            av_free(ctx->buffers);
653
+            return ret;
654
+        }
655
+    }
656
+
657
+    av_log(logger(ctx), AV_LOG_DEBUG, "%s: %s %02d buffers initialized: %04ux%04u, sizeimage %08u, bytesperline %08u\n", ctx->name,
658
+        V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? av_fourcc2str(ctx->format.fmt.pix_mp.pixelformat) : av_fourcc2str(ctx->format.fmt.pix.pixelformat),
659
+        req.count,
660
+        v4l2_get_width(&ctx->format),
661
+        v4l2_get_height(&ctx->format),
662
+        V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage : ctx->format.fmt.pix.sizeimage,
663
+        V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].bytesperline : ctx->format.fmt.pix.bytesperline);
664
+
665
+    return 0;
666
+}
0 667
new file mode 100644
... ...
@@ -0,0 +1,181 @@
0
+/*
1
+ * V4L2 context helper functions.
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#ifndef AVCODEC_V4L2_CONTEXT_H
24
+#define AVCODEC_V4L2_CONTEXT_H
25
+
26
+#include <stdatomic.h>
27
+#include "libavcodec/avcodec.h"
28
+#include "libavutil/pixfmt.h"
29
+#include "libavutil/frame.h"
30
+#include "libavutil/buffer.h"
31
+#include "v4l2_buffers.h"
32
+
33
+typedef struct V4L2Context {
34
+    /**
35
+     * context name.
36
+     */
37
+    const char* name;
38
+
39
+    /**
40
+     * Type of this buffer context.
41
+     * See V4L2_BUF_TYPE_VIDEO_* in videodev2.h
42
+     * Readonly after init.
43
+     */
44
+    enum v4l2_buf_type type;
45
+
46
+    /**
47
+     * AVPixelFormat corresponding to this buffer context.
48
+     * AV_PIX_FMT_NONE means this is an encoded stream.
49
+     */
50
+    enum AVPixelFormat av_pix_fmt;
51
+
52
+    /**
53
+     * AVCodecID corresponding to this buffer context.
54
+     * AV_CODEC_ID_RAWVIDEO means this is a raw stream and av_pix_fmt must be set to a valid value.
55
+     */
56
+    enum AVCodecID av_codec_id;
57
+
58
+    /**
59
+     * Format returned by the driver after initializing the buffer context.
60
+     * Readonly after init.
61
+     */
62
+    struct v4l2_format format;
63
+
64
+    /**
65
+     * Width and height of the frames it produces (in case of a capture context, e.g. when decoding)
66
+     * or accepts (in case of an output context, e.g. when encoding).
67
+     */
68
+    int width, height;
69
+
70
+    /**
71
+     * Indexed array of V4L2Buffers
72
+     */
73
+    V4L2Buffer *buffers;
74
+
75
+    /**
76
+     * Readonly after init.
77
+     */
78
+    int num_buffers;
79
+
80
+    /**
81
+     * Whether the stream has been started (VIDIOC_STREAMON has been sent).
82
+     */
83
+    int streamon;
84
+
85
+    /**
86
+     *  Either no more buffers available or an unrecoverable error was notified
87
+     *  by the V4L2 kernel driver: once set the context has to be exited.
88
+     */
89
+    int done;
90
+
91
+} V4L2Context;
92
+
93
+/**
94
+ * Initializes a V4L2Context.
95
+ *
96
+ * @param[in] ctx A pointer to a V4L2Context. See V4L2Context description for required variables.
97
+ * @return 0 in case of success, a negative value representing the error otherwise.
98
+ */
99
+int ff_v4l2_context_init(V4L2Context* ctx);
100
+
101
+/**
102
+ * Sets the V4L2Context format in the v4l2 driver.
103
+ *
104
+ * @param[in] ctx A pointer to a V4L2Context. See V4L2Context description for required variables.
105
+ * @return 0 in case of success, a negative value representing the error otherwise.
106
+ */
107
+int ff_v4l2_context_set_format(V4L2Context* ctx);
108
+
109
+/**
110
+ * Queries the driver for a valid v4l2 format and copies it to the context.
111
+ *
112
+ * @param[in] ctx A pointer to a V4L2Context. See V4L2Context description for required variables.
113
+ * @return 0 in case of success, a negative value representing the error otherwise.
114
+ */
115
+int ff_v4l2_context_get_format(V4L2Context* ctx);
116
+
117
+/**
118
+ * Releases a V4L2Context.
119
+ *
120
+ * @param[in] ctx A pointer to a V4L2Context.
121
+ *               The caller is responsible for freeing it.
122
+ *               It must not be used after calling this function.
123
+ */
124
+void ff_v4l2_context_release(V4L2Context* ctx);
125
+
126
+/**
127
+ * Sets the status of a V4L2Context.
128
+ *
129
+ * @param[in] ctx A pointer to a V4L2Context.
130
+ * @param[in] cmd The status to set (VIDIOC_STREAMON or VIDIOC_STREAMOFF).
131
+ *                Warning: If VIDIOC_STREAMOFF is sent to a buffer context that still has some frames buffered,
132
+ *                those frames will be dropped.
133
+ * @return 0 in case of success, a negative value representing the error otherwise.
134
+ */
135
+int ff_v4l2_context_set_status(V4L2Context* ctx, int cmd);
136
+
137
+/**
138
+ * Dequeues a buffer from a V4L2Context to an AVPacket.
139
+ *
140
+ * The pkt must be non NULL.
141
+ * @param[in] ctx The V4L2Context to dequeue from.
142
+ * @param[inout] pkt The AVPacket to dequeue to.
143
+ * @return 0 in case of success, AVERROR(EAGAIN) if no buffer was ready, another negative error in case of error.
144
+ */
145
+int ff_v4l2_context_dequeue_packet(V4L2Context* ctx, AVPacket* pkt);
146
+
147
+/**
148
+ * Dequeues a buffer from a V4L2Context to an AVFrame.
149
+ *
150
+ * The frame must be non NULL.
151
+ * @param[in] ctx The V4L2Context to dequeue from.
152
+ * @param[inout] f The AVFrame to dequeue to.
153
+ * @return 0 in case of success, AVERROR(EAGAIN) if no buffer was ready, another negative error in case of error.
154
+ */
155
+int ff_v4l2_context_dequeue_frame(V4L2Context* ctx, AVFrame* f);
156
+
157
+/**
158
+ * Enqueues a buffer to a V4L2Context from an AVPacket
159
+ *
160
+ * The packet must be non NULL.
161
+ * When the size of the pkt is null, the buffer is not queued but a V4L2_DEC_CMD_STOP command is sent instead to the driver.
162
+ *
163
+ * @param[in] ctx The V4L2Context to enqueue to.
164
+ * @param[in] pkt A pointer to an AVPacket.
165
+ * @return 0 in case of success, a negative error otherwise.
166
+ */
167
+int ff_v4l2_context_enqueue_packet(V4L2Context* ctx, const AVPacket* pkt);
168
+
169
+/**
170
+ * Enqueues a buffer to a V4L2Context from an AVFrame
171
+ *
172
+ * The frame must be non NULL.
173
+ *
174
+ * @param[in] ctx The V4L2Context to enqueue to.
175
+ * @param[in] f A pointer to an AVFrame to enqueue.
176
+ * @return 0 in case of success, a negative error otherwise.
177
+ */
178
+int ff_v4l2_context_enqueue_frame(V4L2Context* ctx, const AVFrame* f);
179
+
180
+#endif // AVCODEC_V4L2_CONTEXT_H
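The header above defines the per-queue lifecycle the wrappers build on. As a rough
illustration only - assuming the owning V4L2m2mContext has already opened the device
node and filled in the context's type, av_codec_id and av_pix_fmt, and with error
handling trimmed - an output queue could be driven like this (example_feed_output is
a name of our own choosing, not part of the patch):

#include <linux/videodev2.h>
#include "libavcodec/avcodec.h"
#include "v4l2_context.h"

/* Hedged sketch of the output-queue lifecycle: negotiate the format,
 * request the buffers, start streaming and feed one coded packet. */
static int example_feed_output(V4L2Context *output, const AVPacket *pkt)
{
    int ret;

    ret = ff_v4l2_context_get_format(output);            /* query a valid format */
    if (ret)
        return ret;

    ret = ff_v4l2_context_set_format(output);            /* commit it (VIDIOC_S_FMT) */
    if (ret)
        return ret;

    ret = ff_v4l2_context_init(output);                  /* VIDIOC_REQBUFS + buffer setup */
    if (ret)
        return ret;

    ret = ff_v4l2_context_set_status(output, VIDIOC_STREAMON);
    if (ret)
        return ret;

    return ff_v4l2_context_enqueue_packet(output, pkt);  /* queue one coded packet */
}

Teardown mirrors this order: VIDIOC_STREAMOFF through ff_v4l2_context_set_status(),
then ff_v4l2_context_release().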
0 181
new file mode 100644
... ...
@@ -0,0 +1,182 @@
0
+/*
1
+ * V4L2 format helper functions
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#include <linux/videodev2.h>
24
+#include <search.h>
25
+#include "v4l2_fmt.h"
26
+
27
+#define V4L2_FMT(x) V4L2_PIX_FMT_##x
28
+#define AV_CODEC(x) AV_CODEC_ID_##x
29
+#define AV_FMT(x)   AV_PIX_FMT_##x
30
+
31
+static const struct fmt_conversion {
32
+    enum AVPixelFormat avfmt;
33
+    enum AVCodecID avcodec;
34
+    uint32_t v4l2_fmt;
35
+} fmt_map[] = {
36
+    { AV_FMT(RGB555LE),    AV_CODEC(RAWVIDEO),    V4L2_FMT(RGB555) },
37
+    { AV_FMT(RGB555BE),    AV_CODEC(RAWVIDEO),    V4L2_FMT(RGB555X) },
38
+    { AV_FMT(RGB565LE),    AV_CODEC(RAWVIDEO),    V4L2_FMT(RGB565) },
39
+    { AV_FMT(RGB565BE),    AV_CODEC(RAWVIDEO),    V4L2_FMT(RGB565X) },
40
+    { AV_FMT(BGR24),       AV_CODEC(RAWVIDEO),    V4L2_FMT(BGR24) },
41
+    { AV_FMT(RGB24),       AV_CODEC(RAWVIDEO),    V4L2_FMT(RGB24) },
42
+    { AV_FMT(BGR0),        AV_CODEC(RAWVIDEO),    V4L2_FMT(BGR32) },
43
+    { AV_FMT(0RGB),        AV_CODEC(RAWVIDEO),    V4L2_FMT(RGB32) },
44
+    { AV_FMT(GRAY8),       AV_CODEC(RAWVIDEO),    V4L2_FMT(GREY) },
45
+    { AV_FMT(YUV420P),     AV_CODEC(RAWVIDEO),    V4L2_FMT(YUV420) },
46
+    { AV_FMT(YUYV422),     AV_CODEC(RAWVIDEO),    V4L2_FMT(YUYV) },
47
+    { AV_FMT(UYVY422),     AV_CODEC(RAWVIDEO),    V4L2_FMT(UYVY) },
48
+    { AV_FMT(YUV422P),     AV_CODEC(RAWVIDEO),    V4L2_FMT(YUV422P) },
49
+    { AV_FMT(YUV411P),     AV_CODEC(RAWVIDEO),    V4L2_FMT(YUV411P) },
50
+    { AV_FMT(YUV410P),     AV_CODEC(RAWVIDEO),    V4L2_FMT(YUV410) },
51
+    { AV_FMT(YUV410P),     AV_CODEC(RAWVIDEO),    V4L2_FMT(YVU410) },
52
+    { AV_FMT(NV12),        AV_CODEC(RAWVIDEO),    V4L2_FMT(NV12) },
53
+    { AV_FMT(NONE),        AV_CODEC(MJPEG),       V4L2_FMT(MJPEG) },
54
+    { AV_FMT(NONE),        AV_CODEC(MJPEG),       V4L2_FMT(JPEG) },
55
+#ifdef V4L2_PIX_FMT_SRGGB8
56
+    { AV_FMT(BAYER_BGGR8), AV_CODEC(RAWVIDEO),    V4L2_FMT(SBGGR8) },
57
+    { AV_FMT(BAYER_GBRG8), AV_CODEC(RAWVIDEO),    V4L2_FMT(SGBRG8) },
58
+    { AV_FMT(BAYER_GRBG8), AV_CODEC(RAWVIDEO),    V4L2_FMT(SGRBG8) },
59
+    { AV_FMT(BAYER_RGGB8), AV_CODEC(RAWVIDEO),    V4L2_FMT(SRGGB8) },
60
+#endif
61
+#ifdef V4L2_PIX_FMT_Y16
62
+    { AV_FMT(GRAY16LE),    AV_CODEC(RAWVIDEO),    V4L2_FMT(Y16) },
63
+#endif
64
+#ifdef V4L2_PIX_FMT_NV12M
65
+    { AV_FMT(NV12),        AV_CODEC(RAWVIDEO),    V4L2_FMT(NV12M) },
66
+#endif
67
+#ifdef V4L2_PIX_FMT_NV21M
68
+    { AV_FMT(NV21),        AV_CODEC(RAWVIDEO),    V4L2_FMT(NV21M) },
69
+#endif
70
+#ifdef V4L2_PIX_FMT_YUV420M
71
+    { AV_FMT(YUV420P),     AV_CODEC(RAWVIDEO),    V4L2_FMT(YUV420M) },
72
+#endif
73
+#ifdef V4L2_PIX_FMT_NV16M
74
+    { AV_FMT(NV16),        AV_CODEC(RAWVIDEO),    V4L2_FMT(NV16M) },
75
+#endif
76
+#ifdef V4L2_PIX_FMT_H263
77
+    { AV_FMT(NONE),        AV_CODEC(H263),        V4L2_FMT(H263) },
78
+#endif
79
+#ifdef V4L2_PIX_FMT_H264
80
+    { AV_FMT(NONE),        AV_CODEC(H264),        V4L2_FMT(H264) },
81
+#endif
82
+#ifdef V4L2_PIX_FMT_MPEG4
83
+    { AV_FMT(NONE),        AV_CODEC(MPEG4),       V4L2_FMT(MPEG4) },
84
+#endif
85
+#ifdef V4L2_PIX_FMT_CPIA1
86
+    { AV_FMT(NONE),        AV_CODEC(CPIA),        V4L2_FMT(CPIA1) },
87
+#endif
88
+#ifdef V4L2_PIX_FMT_DV
89
+    { AV_FMT(NONE),        AV_CODEC(DVVIDEO),     V4L2_FMT(DV) },
90
+#endif
91
+#ifdef V4L2_PIX_FMT_MPEG1
92
+    { AV_FMT(NONE),        AV_CODEC(MPEG1VIDEO),  V4L2_FMT(MPEG1) },
93
+#endif
94
+#ifdef V4L2_PIX_FMT_MPEG2
95
+    { AV_FMT(NONE),        AV_CODEC(MPEG2VIDEO),  V4L2_FMT(MPEG2) },
96
+#endif
97
+#ifdef V4L2_PIX_FMT_VP8
98
+    { AV_FMT(NONE),        AV_CODEC(VP8),         V4L2_FMT(VP8) },
99
+#endif
100
+#ifdef V4L2_PIX_FMT_VP9
101
+    { AV_FMT(NONE),        AV_CODEC(VP9),         V4L2_FMT(VP9) },
102
+#endif
103
+#ifdef V4L2_PIX_FMT_HEVC
104
+    { AV_FMT(NONE),        AV_CODEC(HEVC),        V4L2_FMT(HEVC) },
105
+#endif
106
+#ifdef V4L2_PIX_FMT_VC1_ANNEX_G
107
+    { AV_FMT(NONE),        AV_CODEC(VC1),         V4L2_FMT(VC1_ANNEX_G) },
108
+#endif
109
+};
110
+
111
+static int match_codec(const void *a, const void *b)
112
+{
113
+    if (*(enum AVCodecID *)a == ((struct fmt_conversion *)b)->avcodec)
114
+        return 0;
115
+
116
+    return 1;
117
+}
118
+
119
+uint32_t ff_v4l2_format_avcodec_to_v4l2(enum AVCodecID avcodec)
120
+{
121
+    size_t len = FF_ARRAY_ELEMS(fmt_map);
122
+    struct fmt_conversion *item;
123
+
124
+    item = lfind(&avcodec, fmt_map, &len, sizeof(fmt_map[0]), match_codec);
125
+    if (item)
126
+        return item->v4l2_fmt;
127
+
128
+    return 0;
129
+}
130
+
131
+static int match_fmt(const void *a, const void *b)
132
+{
133
+    if (*(enum AVPixelFormat *)a == ((struct fmt_conversion *)b)->avfmt)
134
+        return 0;
135
+
136
+    return 1;
137
+}
138
+
139
+uint32_t ff_v4l2_format_avfmt_to_v4l2(enum AVPixelFormat avfmt)
140
+{
141
+    size_t len = FF_ARRAY_ELEMS(fmt_map);
142
+    struct fmt_conversion *item;
143
+
144
+    item = lfind(&avfmt, fmt_map, &len, sizeof(fmt_map[0]), match_fmt);
145
+    if (item)
146
+        return item->v4l2_fmt;
147
+
148
+    return 0;
149
+}
150
+
151
+struct v4l2fmt_avcodec_pair {
152
+    enum AVCodecID avcodec;
153
+    uint32_t v4l2_fmt;
154
+};
155
+
156
+static int match_codecfmt(const void *a, const void *b)
157
+{
158
+    struct v4l2fmt_avcodec_pair *key = (struct v4l2fmt_avcodec_pair *) a;
159
+    struct fmt_conversion *item = (struct fmt_conversion *) b;
160
+
161
+    if (key->avcodec == item->avcodec && key->v4l2_fmt == item->v4l2_fmt)
162
+        return 0;
163
+
164
+    return 1;
165
+}
166
+
167
+enum AVPixelFormat ff_v4l2_format_v4l2_to_avfmt(uint32_t v4l2_fmt, enum AVCodecID avcodec)
168
+{
169
+    struct v4l2fmt_avcodec_pair const key = {
170
+        .v4l2_fmt = v4l2_fmt,
171
+        .avcodec = avcodec,
172
+    };
173
+    size_t len = FF_ARRAY_ELEMS(fmt_map);
174
+    struct fmt_conversion *item;
175
+
176
+    item = lfind(&key, fmt_map, &len, sizeof(fmt_map[0]), match_codecfmt);
177
+    if (item)
178
+        return item->avfmt;
179
+
180
+    return AV_PIX_FMT_NONE;
181
+}
0 182
new file mode 100644
... ...
@@ -0,0 +1,34 @@
0
+/*
1
+ * V4L2 format helper functions
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#ifndef AVCODEC_V4L2_FMT_H
24
+#define AVCODEC_V4L2_FMT_H
25
+
26
+#include "libavcodec/avcodec.h"
27
+#include "libavutil/pixfmt.h"
28
+
29
+enum AVPixelFormat ff_v4l2_format_v4l2_to_avfmt(uint32_t v4l2_fmt, enum AVCodecID avcodec);
30
+uint32_t ff_v4l2_format_avcodec_to_v4l2(enum AVCodecID avcodec);
31
+uint32_t ff_v4l2_format_avfmt_to_v4l2(enum AVPixelFormat avfmt);
32
+
33
+#endif /* AVCODEC_V4L2_FMT_H */
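The three entry points above are plain linear lookups over fmt_map; a return value of
0 (or AV_PIX_FMT_NONE) means no mapping exists. An illustrative use, not part of the
patch (example_pick_formats is a hypothetical name):

#include "libavcodec/avcodec.h"
#include "v4l2_fmt.h"

/* Illustrative only: translate FFmpeg identifiers to V4L2 pixel formats
 * and back, rejecting combinations the table cannot express. */
static int example_pick_formats(enum AVCodecID codec, enum AVPixelFormat pixfmt)
{
    uint32_t coded = ff_v4l2_format_avcodec_to_v4l2(codec);   /* e.g. V4L2_PIX_FMT_H264 */
    uint32_t raw   = ff_v4l2_format_avfmt_to_v4l2(pixfmt);    /* e.g. V4L2_PIX_FMT_NV12 */

    if (!coded || !raw)
        return AVERROR(EINVAL);

    /* round-trip: recover the AVPixelFormat the capture queue would produce */
    if (ff_v4l2_format_v4l2_to_avfmt(raw, AV_CODEC_ID_RAWVIDEO) != pixfmt)
        return AVERROR(EINVAL);

    return 0;
}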
0 34
new file mode 100644
... ...
@@ -0,0 +1,383 @@
0
+/*
1
+ * V4L mem2mem
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#include <linux/videodev2.h>
24
+#include <sys/ioctl.h>
25
+#include <sys/mman.h>
26
+#include <unistd.h>
27
+#include <dirent.h>
28
+#include <fcntl.h>
29
+#include "libavcodec/avcodec.h"
30
+#include "libavcodec/internal.h"
31
+#include "libavutil/pixdesc.h"
32
+#include "libavutil/imgutils.h"
33
+#include "libavutil/pixfmt.h"
34
+#include "v4l2_context.h"
35
+#include "v4l2_fmt.h"
36
+#include "v4l2_m2m.h"
37
+
38
+static inline int v4l2_splane_video(struct v4l2_capability *cap)
39
+{
40
+    if (cap->capabilities & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT) &&
41
+        cap->capabilities & V4L2_CAP_STREAMING)
42
+        return 1;
43
+
44
+    if (cap->capabilities & V4L2_CAP_VIDEO_M2M)
45
+        return 1;
46
+
47
+    return 0;
48
+}
49
+
50
+static inline int v4l2_mplane_video(struct v4l2_capability *cap)
51
+{
52
+    if (cap->capabilities & (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE) &&
53
+        cap->capabilities & V4L2_CAP_STREAMING)
54
+        return 1;
55
+
56
+    if (cap->capabilities & V4L2_CAP_VIDEO_M2M_MPLANE)
57
+        return 1;
58
+
59
+    return 0;
60
+}
61
+
62
+static int v4l2_prepare_contexts(V4L2m2mContext* s)
63
+{
64
+    struct v4l2_capability cap;
65
+    int ret;
66
+
67
+    s->capture.done = s->output.done = 0;
68
+    s->capture.name = "capture";
69
+    s->output.name = "output ";
70
+    atomic_init(&s->refcount, 0);
71
+    sem_init(&s->refsync, 0, 0);
72
+
73
+    memset(&cap, 0, sizeof(cap));
74
+    ret = ioctl(s->fd, VIDIOC_QUERYCAP, &cap);
75
+    if (ret < 0)
76
+        return ret;
77
+
78
+    av_log(s->avctx, AV_LOG_INFO, "driver '%s' on card '%s'\n", cap.driver, cap.card);
79
+
80
+    if (v4l2_mplane_video(&cap)) {
81
+        s->capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
82
+        s->output.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
83
+        return 0;
84
+    }
85
+
86
+    if (v4l2_splane_video(&cap)) {
87
+        s->capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
88
+        s->output.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
89
+        return 0;
90
+    }
91
+
92
+    return AVERROR(EINVAL);
93
+}
94
+
95
+static int v4l2_probe_driver(V4L2m2mContext* s)
96
+{
97
+    int ret;
98
+
99
+    s->fd = open(s->devname, O_RDWR | O_NONBLOCK, 0);
100
+    if (s->fd < 0)
101
+        return AVERROR(errno);
102
+
103
+    ret = v4l2_prepare_contexts(s);
104
+    if (ret < 0)
105
+        goto done;
106
+
107
+    ret = ff_v4l2_context_get_format(&s->output);
108
+    if (ret) {
109
+        av_log(s->avctx, AV_LOG_DEBUG, "v4l2 output format not supported\n");
110
+        goto done;
111
+    }
112
+
113
+    ret = ff_v4l2_context_get_format(&s->capture);
114
+    if (ret) {
115
+        av_log(s->avctx, AV_LOG_DEBUG, "v4l2 capture format not supported\n");
116
+        goto done;
117
+    }
118
+
119
+done:
120
+    if (close(s->fd) < 0) {
121
+        ret = AVERROR(errno);
122
+        av_log(s->avctx, AV_LOG_ERROR, "failure closing %s (%s)\n", s->devname, av_err2str(AVERROR(errno)));
123
+    }
124
+
125
+    s->fd = -1;
126
+
127
+    return ret;
128
+}
129
+
130
+static int v4l2_configure_contexts(V4L2m2mContext* s)
131
+{
132
+    void *log_ctx = s->avctx;
133
+    int ret;
134
+
135
+    s->fd = open(s->devname, O_RDWR | O_NONBLOCK, 0);
136
+    if (s->fd < 0)
137
+        return AVERROR(errno);
138
+
139
+    ret = v4l2_prepare_contexts(s);
140
+    if (ret < 0)
141
+        goto error;
142
+
143
+    ret = ff_v4l2_context_set_format(&s->output);
144
+    if (ret) {
145
+        av_log(log_ctx, AV_LOG_ERROR, "can't set v4l2 output format\n");
146
+        goto error;
147
+    }
148
+
149
+    ret = ff_v4l2_context_set_format(&s->capture);
150
+    if (ret) {
151
+        av_log(log_ctx, AV_LOG_ERROR, "can't set v4l2 capture format\n");
152
+        goto error;
153
+    }
154
+
155
+    ret = ff_v4l2_context_init(&s->output);
156
+    if (ret) {
157
+        av_log(log_ctx, AV_LOG_ERROR, "failed to initialize the v4l2 output context buffers\n");
158
+        goto error;
159
+    }
160
+
161
+    /* decoder's buffers need to be updated at a later stage */
162
+    if (!av_codec_is_decoder(s->avctx->codec)) {
163
+        ret = ff_v4l2_context_init(&s->capture);
164
+        if (ret) {
165
+            av_log(log_ctx, AV_LOG_ERROR, "failed to initialize the v4l2 capture context buffers\n");
166
+            goto error;
167
+        }
168
+    }
169
+
170
+    return 0;
171
+
172
+error:
173
+    if (close(s->fd) < 0) {
174
+        ret = AVERROR(errno);
175
+        av_log(log_ctx, AV_LOG_ERROR, "error closing %s (%s)\n",
176
+            s->devname, av_err2str(AVERROR(errno)));
177
+    }
178
+    s->fd = -1;
179
+
180
+    return ret;
181
+}
182
+
183
+/******************************************************************************
184
+ *
185
+ *                  V4L2 M2M Interface
186
+ *
187
+ ******************************************************************************/
188
+int ff_v4l2_m2m_codec_reinit(V4L2m2mContext* s)
189
+{
190
+    int ret;
191
+
192
+    av_log(s->avctx, AV_LOG_DEBUG, "reinit context\n");
193
+
194
+    /* 1. streamoff */
195
+    ret = ff_v4l2_context_set_status(&s->capture, VIDIOC_STREAMOFF);
196
+    if (ret)
197
+        av_log(s->avctx, AV_LOG_ERROR, "capture VIDIOC_STREAMOFF\n");
198
+
199
+    /* 2. unmap the capture buffers (v4l2 and ffmpeg):
200
+     *    we must wait for all references to be released before being allowed
201
+     *    to queue new buffers.
202
+     */
203
+    av_log(s->avctx, AV_LOG_DEBUG, "waiting for user to release AVBufferRefs\n");
204
+    if (atomic_load(&s->refcount))
205
+        while (sem_wait(&s->refsync) == -1 && errno == EINTR);
206
+
207
+    ff_v4l2_context_release(&s->capture);
208
+
209
+    /* 3. get the new capture format */
210
+    ret = ff_v4l2_context_get_format(&s->capture);
211
+    if (ret) {
212
+        av_log(s->avctx, AV_LOG_ERROR, "query the new capture format\n");
213
+        return ret;
214
+    }
215
+
216
+    /* 4. set the capture format */
217
+    ret = ff_v4l2_context_set_format(&s->capture);
218
+    if (ret) {
219
+        av_log(s->avctx, AV_LOG_ERROR, "setting capture format\n");
220
+        return ret;
221
+    }
222
+
223
+    /* 5. complete reinit */
224
+    sem_destroy(&s->refsync);
225
+    sem_init(&s->refsync, 0, 0);
226
+    s->draining = 0;
227
+    s->reinit = 0;
228
+
229
+    return 0;
230
+}
231
+
232
+int ff_v4l2_m2m_codec_full_reinit(V4L2m2mContext *s)
233
+{
234
+    void *log_ctx = s->avctx;
235
+    int ret;
236
+
237
+    av_log(log_ctx, AV_LOG_DEBUG, "%s full reinit\n", s->devname);
238
+
239
+    /* wait for pending buffer references */
240
+    if (atomic_load(&s->refcount))
241
+        while (sem_wait(&s->refsync) == -1 && errno == EINTR);
242
+
243
+    /* close the driver */
244
+    ff_v4l2_m2m_codec_end(s->avctx);
245
+
246
+    /* start again now that we know the stream dimensions */
247
+    s->draining = 0;
248
+    s->reinit = 0;
249
+
250
+    s->fd = open(s->devname, O_RDWR | O_NONBLOCK, 0);
251
+    if (s->fd < 0)
252
+        return AVERROR(errno);
253
+
254
+    ret = v4l2_prepare_contexts(s);
255
+    if (ret < 0)
256
+        goto error;
257
+
258
+    /* if a full re-init was requested - probe didn't run - we need to populate
259
+     * the format for each context
260
+     */
261
+    ret = ff_v4l2_context_get_format(&s->output);
262
+    if (ret) {
263
+        av_log(log_ctx, AV_LOG_DEBUG, "v4l2 output format not supported\n");
264
+        goto error;
265
+    }
266
+
267
+    ret = ff_v4l2_context_get_format(&s->capture);
268
+    if (ret) {
269
+        av_log(log_ctx, AV_LOG_DEBUG, "v4l2 capture format not supported\n");
270
+        goto error;
271
+    }
272
+
273
+    ret = ff_v4l2_context_set_format(&s->output);
274
+    if (ret) {
275
+        av_log(log_ctx, AV_LOG_ERROR, "can't set v4l2 output format\n");
276
+        goto error;
277
+    }
278
+
279
+    ret = ff_v4l2_context_set_format(&s->capture);
280
+    if (ret) {
281
+        av_log(log_ctx, AV_LOG_ERROR, "can't set v4l2 capture format\n");
282
+        goto error;
283
+    }
284
+
285
+    ret = ff_v4l2_context_init(&s->output);
286
+    if (ret) {
287
+        av_log(log_ctx, AV_LOG_ERROR, "failed to initialize the v4l2 output context buffers\n");
288
+        goto error;
289
+    }
290
+
291
+    /* decoder's buffers need to be updated at a later stage */
292
+    if (!av_codec_is_decoder(s->avctx->codec)) {
293
+        ret = ff_v4l2_context_init(&s->capture);
294
+        if (ret) {
295
+            av_log(log_ctx, AV_LOG_ERROR, "failed to initialize the v4l2 capture context buffers\n");
296
+            goto error;
297
+        }
298
+    }
299
+
300
+    return 0;
301
+
302
+error:
303
+    if (close(s->fd) < 0) {
304
+        ret = AVERROR(errno);
305
+        av_log(log_ctx, AV_LOG_ERROR, "error closing %s (%s)\n",
306
+            s->devname, av_err2str(AVERROR(errno)));
307
+    }
308
+    s->fd = -1;
309
+
310
+    return ret;
311
+}
312
+
313
+int ff_v4l2_m2m_codec_end(AVCodecContext *avctx)
314
+{
315
+    V4L2m2mContext* s = avctx->priv_data;
316
+    int ret;
317
+
318
+    ret = ff_v4l2_context_set_status(&s->output, VIDIOC_STREAMOFF);
319
+    if (ret)
320
+        av_log(avctx, AV_LOG_ERROR, "VIDIOC_STREAMOFF %s\n", s->output.name);
321
+
322
+    ret = ff_v4l2_context_set_status(&s->capture, VIDIOC_STREAMOFF);
323
+    if (ret)
324
+        av_log(avctx, AV_LOG_ERROR, "VIDIOC_STREAMOFF %s\n", s->capture.name);
325
+
326
+    ff_v4l2_context_release(&s->output);
327
+
328
+    if (atomic_load(&s->refcount))
329
+        av_log(avctx, AV_LOG_ERROR, "ff_v4l2_m2m_codec_end leaving pending buffers\n");
330
+
331
+    ff_v4l2_context_release(&s->capture);
332
+    sem_destroy(&s->refsync);
333
+
334
+    /* release the hardware */
335
+    if (close(s->fd) < 0)
336
+        av_log(avctx, AV_LOG_ERROR, "failure closing %s (%s)\n", s->devname, av_err2str(AVERROR(errno)));
337
+
338
+    s->fd = -1;
339
+
340
+    return 0;
341
+}
342
+
343
+int ff_v4l2_m2m_codec_init(AVCodecContext *avctx)
344
+{
345
+    int ret = AVERROR(EINVAL);
346
+    struct dirent *entry;
347
+    char node[PATH_MAX];
348
+    DIR *dirp;
349
+
350
+    V4L2m2mContext *s = avctx->priv_data;
351
+    s->avctx = avctx;
352
+
353
+    dirp = opendir("/dev");
354
+    if (!dirp)
355
+        return AVERROR(errno);
356
+
357
+    for (entry = readdir(dirp); entry; entry = readdir(dirp)) {
358
+
359
+        if (strncmp(entry->d_name, "video", 5))
360
+            continue;
361
+
362
+        snprintf(node, sizeof(node), "/dev/%s", entry->d_name);
363
+        av_log(s->avctx, AV_LOG_DEBUG, "probing device %s\n", node);
364
+        strncpy(s->devname, node, strlen(node) + 1);
365
+        ret = v4l2_probe_driver(s);
366
+        if (!ret)
367
+            break;
368
+    }
369
+
370
+    closedir(dirp);
371
+
372
+    if (ret) {
373
+        av_log(s->avctx, AV_LOG_ERROR, "Could not find a valid device\n");
374
+        memset(s->devname, 0, sizeof(s->devname));
375
+
376
+        return ret;
377
+    }
378
+
379
+    av_log(s->avctx, AV_LOG_INFO, "Using device %s\n", node);
380
+
381
+    return v4l2_configure_contexts(s);
382
+}
0 383
new file mode 100644
... ...
@@ -0,0 +1,103 @@
0
+/*
1
+ * V4L2 mem2mem helper functions
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#ifndef AVCODEC_V4L2_M2M_H
24
+#define AVCODEC_V4L2_M2M_H
25
+
26
+#include <semaphore.h>
27
+#include <unistd.h>
28
+#include <dirent.h>
29
+#include "libavcodec/avcodec.h"
30
+#include "v4l2_context.h"
31
+
32
+#define container_of(ptr, type, member) ({ \
33
+        const __typeof__(((type *)0)->member ) *__mptr = (ptr); \
34
+        (type *)((char *)__mptr - offsetof(type,member) );})
35
+
36
+#define V4L_M2M_DEFAULT_OPTS \
37
+    { "num_output_buffers", "Number of buffers in the output context",\
38
+        OFFSET(output.num_buffers), AV_OPT_TYPE_INT, { .i64 = 16 }, 6, INT_MAX, FLAGS }
39
+
40
+typedef struct V4L2m2mContext
41
+{
42
+    AVClass *class;
43
+    char devname[PATH_MAX];
44
+    int fd;
45
+
46
+    /* the codec context queues */
47
+    V4L2Context capture;
48
+    V4L2Context output;
49
+
50
+    /* refcount of buffers held by the user */
51
+    atomic_uint refcount;
52
+
53
+    /* dynamic stream reconfig */
54
+    AVCodecContext *avctx;
55
+    sem_t refsync;
56
+    int reinit;
57
+
58
+    /* null frame/packet received */
59
+    int draining;
60
+} V4L2m2mContext;
61
+
62
+/**
63
+ * Probes the video nodes looking for the required codec capabilities.
64
+ *
65
+ * @param[in] avctx The AVCodecContext instantiated by the encoder/decoder.
66
+ *
67
+ * @returns 0 if a driver is found, a negative number otherwise.
68
+ */
69
+int ff_v4l2_m2m_codec_init(AVCodecContext *avctx);
70
+
71
+/**
72
+ * Releases all the codec resources if all AVBufferRefs have been returned to the
73
+ * ctx. Otherwise, the driver is kept open.
74
+ *
75
+ * @param[in] avctx The AVCodecContext instantiated by the encoder/decoder.
76
+ *
77
+ * @returns 0
78
+ *
79
+ */
80
+int ff_v4l2_m2m_codec_end(AVCodecContext *avctx);
81
+
82
+/**
83
+ * Reinitializes the V4L2m2mContext when the driver can't continue processing
84
+ * with the capture parameters.
85
+ *
86
+ * @param[in] ctx The V4L2m2mContext instantiated by the encoder/decoder.
87
+ *
88
+ * @returns 0 in case of success, negative number otherwise
89
+ */
90
+int ff_v4l2_m2m_codec_reinit(V4L2m2mContext *ctx);
91
+
92
+/**
93
+ * Reinitializes the V4L2m2mContext when the driver can't continue processing
94
+ * with any of the current V4L2Contexts (i.e., changes in output and capture).
95
+ *
96
+ * @param[in] ctx The V4L2m2mContext instantiated by the encoder/decoder.
97
+ *
98
+ * @returns 0 in case of success, negative number otherwise
99
+ */
100
+int ff_v4l2_m2m_codec_full_reinit(V4L2m2mContext *ctx);
101
+
102
+#endif /* AVCODEC_V4L2_M2M_H */
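The container_of() macro above is what lets the per-queue code climb from a V4L2Context
back to the owning session; the ctx_to_m2mctx() calls in v4l2_context.c rely on the same
idea. A hedged sketch of such a helper (example_owner is our name, not the tree's):

#include <linux/videodev2.h>
#include "v4l2_m2m.h"

/* Sketch only: recover the owning V4L2m2mContext from either of its
 * embedded V4L2Context members, based on the queue direction. */
static inline V4L2m2mContext *example_owner(V4L2Context *ctx)
{
    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
        return container_of(ctx, V4L2m2mContext, output);

    return container_of(ctx, V4L2m2mContext, capture);
}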
0 103
new file mode 100644
... ...
@@ -0,0 +1,228 @@
0
+/*
1
+ * V4L2 mem2mem decoders
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#include <linux/videodev2.h>
24
+#include <sys/ioctl.h>
25
+#include "libavutil/pixfmt.h"
26
+#include "libavutil/pixdesc.h"
27
+#include "libavutil/opt.h"
28
+#include "libavcodec/avcodec.h"
29
+#include "libavcodec/decode.h"
30
+
31
+#include "v4l2_context.h"
32
+#include "v4l2_m2m.h"
33
+#include "v4l2_fmt.h"
34
+
35
+static int v4l2_try_start(AVCodecContext *avctx)
36
+{
37
+    V4L2m2mContext *s = avctx->priv_data;
38
+    V4L2Context *const capture = &s->capture;
39
+    V4L2Context *const output = &s->output;
40
+    struct v4l2_selection selection;
41
+    int ret;
42
+
43
+    /* 1. start the output process */
44
+    if (!output->streamon) {
45
+        ret = ff_v4l2_context_set_status(output, VIDIOC_STREAMON);
46
+        if (ret < 0) {
47
+            av_log(avctx, AV_LOG_DEBUG, "VIDIOC_STREAMON on output context\n");
48
+            return ret;
49
+        }
50
+    }
51
+
52
+    if (capture->streamon)
53
+        return 0;
54
+
55
+    /* 2. get the capture format */
56
+    capture->format.type = capture->type;
57
+    ret = ioctl(s->fd, VIDIOC_G_FMT, &capture->format);
58
+    if (ret) {
59
+        av_log(avctx, AV_LOG_WARNING, "VIDIOC_G_FMT ioctl\n");
60
+        return ret;
61
+    }
62
+
63
+    /* 2.1 update the AVCodecContext */
64
+    avctx->pix_fmt = ff_v4l2_format_v4l2_to_avfmt(capture->format.fmt.pix_mp.pixelformat, AV_CODEC_ID_RAWVIDEO);
65
+    capture->av_pix_fmt = avctx->pix_fmt;
66
+
67
+    /* 3. set the crop parameters */
68
+    selection.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
69
+    selection.r.height = avctx->coded_height;
70
+    selection.r.width = avctx->coded_width;
71
+    ret = ioctl(s->fd, VIDIOC_S_SELECTION, &selection);
72
+    if (!ret) {
73
+        ret = ioctl(s->fd, VIDIOC_G_SELECTION, &selection);
74
+        if (ret) {
75
+            av_log(avctx, AV_LOG_WARNING, "VIDIOC_G_SELECTION ioctl\n");
76
+        } else {
77
+            av_log(avctx, AV_LOG_DEBUG, "crop output %dx%d\n", selection.r.width, selection.r.height);
78
+            /* update the size of the resulting frame */
79
+            capture->height = selection.r.height;
80
+            capture->width  = selection.r.width;
81
+        }
82
+    }
83
+
84
+    /* 4. init the capture context now that we have the capture format */
85
+    if (!capture->buffers) {
86
+        ret = ff_v4l2_context_init(capture);
87
+        if (ret) {
88
+            av_log(avctx, AV_LOG_DEBUG, "can't request capture buffers\n");
89
+            return ret;
90
+        }
91
+    }
92
+
93
+    /* 5. start the capture process */
94
+    ret = ff_v4l2_context_set_status(capture, VIDIOC_STREAMON);
95
+    if (ret) {
96
+        av_log(avctx, AV_LOG_DEBUG, "VIDIOC_STREAMON on capture context\n");
97
+        return ret;
98
+    }
99
+
100
+    return 0;
101
+}
102
+
103
+static int v4l2_prepare_decoder(V4L2m2mContext *s)
104
+{
105
+    struct v4l2_event_subscription sub;
106
+    V4L2Context *output = &s->output;
107
+    int ret;
108
+
109
+    /**
110
+     * requirements
111
+     */
112
+    memset(&sub, 0, sizeof(sub));
113
+    sub.type = V4L2_EVENT_SOURCE_CHANGE;
114
+    ret = ioctl(s->fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
115
+    if (ret < 0) {
116
+        if (output->height == 0 || output->width == 0) {
117
+            av_log(s->avctx, AV_LOG_ERROR,
118
+                "the v4l2 driver does not support VIDIOC_SUBSCRIBE_EVENT\n"
119
+                "you must provide codec_height and codec_width on input\n");
120
+            return ret;
121
+        }
122
+    }
123
+
124
+    return 0;
125
+}
126
+
127
+static int v4l2_receive_frame(AVCodecContext *avctx, AVFrame *frame)
128
+{
129
+    V4L2m2mContext *s = avctx->priv_data;
130
+    V4L2Context *const capture = &s->capture;
131
+    V4L2Context *const output = &s->output;
132
+    AVPacket avpkt = {0};
133
+    int ret;
134
+
135
+    ret = ff_decode_get_packet(avctx, &avpkt);
136
+    if (ret < 0 && ret != AVERROR_EOF)
137
+        return ret;
138
+
139
+    if (s->draining)
140
+        goto dequeue;
141
+
142
+    ret = ff_v4l2_context_enqueue_packet(output, &avpkt);
143
+    if (ret < 0) {
144
+        if (ret != AVERROR(ENOMEM))
145
+            return ret;
146
+        /* no input buffers available, continue dequeuing */
147
+    }
148
+
149
+    if (avpkt.size) {
150
+        ret = v4l2_try_start(avctx);
151
+        if (ret)
152
+            return 0;
153
+    }
154
+
155
+dequeue:
156
+    return ff_v4l2_context_dequeue_frame(capture, frame);
157
+}
158
+
159
+static av_cold int v4l2_decode_init(AVCodecContext *avctx)
160
+{
161
+    V4L2m2mContext *s = avctx->priv_data;
162
+    V4L2Context *capture = &s->capture;
163
+    V4L2Context *output = &s->output;
164
+    int ret;
165
+
166
+    /* if these dimensions are invalid (ie, 0 or too small) an event will be raised
167
+     * by the v4l2 driver; this event will trigger a full pipeline reconfig and
168
+     * the proper values will be retrieved from the kernel driver.
169
+     */
170
+    output->height = capture->height = avctx->coded_height;
171
+    output->width = capture->width = avctx->coded_width;
172
+
173
+    output->av_codec_id = avctx->codec_id;
174
+    output->av_pix_fmt  = AV_PIX_FMT_NONE;
175
+
176
+    capture->av_codec_id = AV_CODEC_ID_RAWVIDEO;
177
+    capture->av_pix_fmt = avctx->pix_fmt;
178
+
179
+    ret = ff_v4l2_m2m_codec_init(avctx);
180
+    if (ret) {
181
+        av_log(avctx, AV_LOG_ERROR, "can't configure decoder\n");
182
+        return ret;
183
+    }
184
+
185
+    return v4l2_prepare_decoder(s);
186
+}
187
+
188
+#define OFFSET(x) offsetof(V4L2m2mContext, x)
189
+#define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
190
+
191
+static const AVOption options[] = {
192
+    V4L_M2M_DEFAULT_OPTS,
193
+    { "num_capture_buffers", "Number of buffers in the capture context",
194
+        OFFSET(capture.num_buffers), AV_OPT_TYPE_INT, {.i64 = 20}, 20, INT_MAX, FLAGS },
195
+    { NULL },
196
+};
197
+
198
+#define M2MDEC(NAME, LONGNAME, CODEC, bsf_name) \
199
+static const AVClass v4l2_m2m_ ## NAME ## _dec_class = {\
200
+    .class_name = #NAME "_v4l2_m2m_decoder",\
201
+    .item_name  = av_default_item_name,\
202
+    .option     = options,\
203
+    .version    = LIBAVUTIL_VERSION_INT,\
204
+};\
205
+\
206
+AVCodec ff_ ## NAME ## _v4l2m2m_decoder = { \
207
+    .name           = #NAME "_v4l2m2m" ,\
208
+    .long_name      = NULL_IF_CONFIG_SMALL("V4L2 mem2mem " LONGNAME " decoder wrapper"),\
209
+    .type           = AVMEDIA_TYPE_VIDEO,\
210
+    .id             = CODEC ,\
211
+    .priv_data_size = sizeof(V4L2m2mContext),\
212
+    .priv_class     = &v4l2_m2m_ ## NAME ## _dec_class,\
213
+    .init           = v4l2_decode_init,\
214
+    .receive_frame  = v4l2_receive_frame,\
215
+    .close          = ff_v4l2_m2m_codec_end,\
216
+    .bsfs           = bsf_name, \
217
+};
218
+
219
+M2MDEC(h264,  "H.264", AV_CODEC_ID_H264,       "h264_mp4toannexb");
220
+M2MDEC(hevc,  "HEVC",  AV_CODEC_ID_HEVC,       "hevc_mp4toannexb");
221
+M2MDEC(mpeg1, "MPEG1", AV_CODEC_ID_MPEG1VIDEO, NULL);
222
+M2MDEC(mpeg2, "MPEG2", AV_CODEC_ID_MPEG2VIDEO, NULL);
223
+M2MDEC(mpeg4, "MPEG4", AV_CODEC_ID_MPEG4,      NULL);
224
+M2MDEC(h263,  "H.263", AV_CODEC_ID_H263,       NULL);
225
+M2MDEC(vc1 ,  "VC1",   AV_CODEC_ID_VC1,        NULL);
226
+M2MDEC(vp8,   "VP8",   AV_CODEC_ID_VP8,        NULL);
227
+M2MDEC(vp9,   "VP9",   AV_CODEC_ID_VP9,        NULL);
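Because M2MDEC() registers ordinary AVCodec entries, the wrappers are driven through the
generic send/receive API like any other decoder. A minimal, hedged sketch (extradata
handling and the retry loop around AVERROR(EAGAIN) are omitted; example_decode_one is a
hypothetical helper, and it assumes a build with v4l2_m2m enabled and a suitable device):

#include "libavcodec/avcodec.h"

/* Sketch: decode one packet with the h264_v4l2m2m wrapper. */
static int example_decode_one(AVPacket *pkt, AVFrame *frame)
{
    AVCodec *codec = avcodec_find_decoder_by_name("h264_v4l2m2m");
    AVCodecContext *avctx = codec ? avcodec_alloc_context3(codec) : NULL;
    int ret;

    if (!avctx)
        return AVERROR_DECODER_NOT_FOUND;

    ret = avcodec_open2(avctx, codec, NULL);
    if (ret < 0)
        goto end;

    ret = avcodec_send_packet(avctx, pkt);         /* a NULL packet starts draining */
    if (ret >= 0 || ret == AVERROR(EAGAIN))
        ret = avcodec_receive_frame(avctx, frame); /* EAGAIN until the driver has a frame */

end:
    avcodec_free_context(&avctx);
    return ret;
}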
0 228
new file mode 100644
... ...
@@ -0,0 +1,352 @@
0
+/*
1
+ * V4L2 mem2mem encoders
2
+ *
3
+ * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
4
+ * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
5
+ *
6
+ * This file is part of FFmpeg.
7
+ *
8
+ * FFmpeg is free software; you can redistribute it and/or
9
+ * modify it under the terms of the GNU Lesser General Public
10
+ * License as published by the Free Software Foundation; either
11
+ * version 2.1 of the License, or (at your option) any later version.
12
+ *
13
+ * FFmpeg is distributed in the hope that it will be useful,
14
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
15
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16
+ * Lesser General Public License for more details.
17
+ *
18
+ * You should have received a copy of the GNU Lesser General Public
19
+ * License along with FFmpeg; if not, write to the Free Software
20
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21
+ */
22
+
23
+#include <linux/videodev2.h>
24
+#include <sys/ioctl.h>
25
+#include <search.h>
26
+#include "libavcodec/avcodec.h"
27
+#include "libavutil/pixdesc.h"
28
+#include "libavutil/pixfmt.h"
29
+#include "libavutil/opt.h"
30
+#include "v4l2_context.h"
31
+#include "v4l2_m2m.h"
32
+
33
+#define MPEG_CID(x) V4L2_CID_MPEG_VIDEO_##x
34
+#define MPEG_VIDEO(x) V4L2_MPEG_VIDEO_##x
35
+
36
+static inline void v4l2_set_timeperframe(V4L2m2mContext *s, unsigned int num, unsigned int den)
37
+{
38
+    struct v4l2_streamparm parm = { 0 };
39
+
40
+    parm.type = V4L2_TYPE_IS_MULTIPLANAR(s->output.type) ? V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE : V4L2_BUF_TYPE_VIDEO_OUTPUT;
41
+    parm.parm.output.timeperframe.denominator = den;
42
+    parm.parm.output.timeperframe.numerator = num;
43
+
44
+    if (ioctl(s->fd, VIDIOC_S_PARM, &parm) < 0)
45
+        av_log(s->avctx, AV_LOG_WARNING, "Failed to set timeperframe\n");
46
+}
47
+
48
+static inline void v4l2_set_ext_ctrl(V4L2m2mContext *s, unsigned int id, signed int value, const char *name)
49
+{
50
+    struct v4l2_ext_controls ctrls = { 0 };
51
+    struct v4l2_ext_control ctrl = { 0 };
52
+
53
+    /* set ctrls */
54
+    ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
55
+    ctrls.controls = &ctrl;
56
+    ctrls.count = 1;
57
+
58
+    /* set ctrl */
59
+    ctrl.value = value;
60
+    ctrl.id = id;
61
+
62
+    if (ioctl(s->fd, VIDIOC_S_EXT_CTRLS, &ctrls) < 0)
63
+        av_log(s->avctx, AV_LOG_WARNING, "Failed to set %s\n", name);
64
+    else
65
+        av_log(s->avctx, AV_LOG_DEBUG, "Encoder: %s = %d\n", name, value);
66
+}
67
+
68
+static inline int v4l2_get_ext_ctrl(V4L2m2mContext *s, unsigned int id, signed int *value, const char *name)
69
+{
70
+    struct v4l2_ext_controls ctrls = { 0 };
71
+    struct v4l2_ext_control ctrl = { 0 };
72
+    int ret;
73
+
74
+    /* set ctrls */
75
+    ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
76
+    ctrls.controls = &ctrl;
77
+    ctrls.count = 1;
78
+
79
+    /* set ctrl */
80
+    ctrl.id = id;
81
+
82
+    ret = ioctl(s->fd, VIDIOC_G_EXT_CTRLS, &ctrls);
83
+    if (ret < 0) {
84
+        av_log(s->avctx, AV_LOG_WARNING, "Failed to get %s\n", name);
85
+        return ret;
86
+    }
87
+
88
+    *value = ctrl.value;
89
+
90
+    return 0;
91
+}
92
+
93
+static int match_profile(const void *a, const void *b)
94
+{
95
+    if (*(unsigned int *)a == *(unsigned int *)b)
96
+        return 0;
97
+
98
+    return 1;
99
+}
100
+
101
+static inline int v4l2_h264_profile_from_ff(int p)
102
+{
103
+    struct h264_profile  {
104
+        unsigned int ffmpeg_val;
105
+        unsigned int v4l2_val;
106
+    } *val, profile[] = {
107
+        { FF_PROFILE_H264_CONSTRAINED_BASELINE, MPEG_VIDEO(H264_PROFILE_CONSTRAINED_BASELINE) },
108
+        { FF_PROFILE_H264_HIGH_444_PREDICTIVE, MPEG_VIDEO(H264_PROFILE_HIGH_444_PREDICTIVE) },
109
+        { FF_PROFILE_H264_HIGH_422_INTRA, MPEG_VIDEO(H264_PROFILE_HIGH_422_INTRA) },
110
+        { FF_PROFILE_H264_HIGH_444_INTRA, MPEG_VIDEO(H264_PROFILE_HIGH_444_INTRA) },
111
+        { FF_PROFILE_H264_HIGH_10_INTRA, MPEG_VIDEO(H264_PROFILE_HIGH_10_INTRA) },
112
+        { FF_PROFILE_H264_HIGH_422, MPEG_VIDEO(H264_PROFILE_HIGH_422) },
113
+        { FF_PROFILE_H264_BASELINE, MPEG_VIDEO(H264_PROFILE_BASELINE) },
114
+        { FF_PROFILE_H264_EXTENDED, MPEG_VIDEO(H264_PROFILE_EXTENDED) },
115
+        { FF_PROFILE_H264_HIGH_10, MPEG_VIDEO(H264_PROFILE_HIGH_10) },
116
+        { FF_PROFILE_H264_MAIN, MPEG_VIDEO(H264_PROFILE_MAIN) },
117
+        { FF_PROFILE_H264_HIGH, MPEG_VIDEO(H264_PROFILE_HIGH) },
118
+    };
119
+    size_t len = FF_ARRAY_ELEMS(profile);
120
+
121
+    val = lfind(&p, profile, &len, sizeof(profile[0]), match_profile);
122
+    if (val)
123
+        return val->v4l2_val;
124
+
125
+    return AVERROR(ENOENT);
126
+}
127
+
128
+static inline int v4l2_mpeg4_profile_from_ff(int p)
129
+{
130
+    struct mpeg4_profile {
131
+        unsigned int ffmpeg_val;
132
+        unsigned int v4l2_val;
133
+    } *val, profile[] = {
134
+        { FF_PROFILE_MPEG4_ADVANCED_CODING, MPEG_VIDEO(MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY) },
135
+        { FF_PROFILE_MPEG4_ADVANCED_SIMPLE, MPEG_VIDEO(MPEG4_PROFILE_ADVANCED_SIMPLE) },
136
+        { FF_PROFILE_MPEG4_SIMPLE_SCALABLE, MPEG_VIDEO(MPEG4_PROFILE_SIMPLE_SCALABLE) },
137
+        { FF_PROFILE_MPEG4_SIMPLE, MPEG_VIDEO(MPEG4_PROFILE_SIMPLE) },
138
+        { FF_PROFILE_MPEG4_CORE, MPEG_VIDEO(MPEG4_PROFILE_CORE) },
139
+    };
140
+    size_t len = FF_ARRAY_ELEMS(profile);
141
+
142
+    val = lfind(&p, profile, &len, sizeof(profile[0]), match_profile);
143
+    if (val)
144
+        return val->v4l2_val;
145
+
146
+    return AVERROR(ENOENT);
147
+}
148
+
149
+static int v4l2_check_b_frame_support(V4L2m2mContext *s)
150
+{
151
+    if (s->avctx->max_b_frames)
152
+        av_log(s->avctx, AV_LOG_WARNING, "Encoder does not support b-frames yet\n");
153
+
154
+    v4l2_set_ext_ctrl(s, MPEG_CID(B_FRAMES), 0, "number of B-frames");
155
+    v4l2_get_ext_ctrl(s, MPEG_CID(B_FRAMES), &s->avctx->max_b_frames, "number of B-frames");
156
+    if (s->avctx->max_b_frames == 0)
157
+        return 0;
158
+
159
+    avpriv_report_missing_feature(s->avctx, "DTS/PTS calculation for V4L2 encoding");
160
+
161
+    return AVERROR_PATCHWELCOME;
162
+}
163
+
164
+static int v4l2_prepare_encoder(V4L2m2mContext *s)
165
+{
166
+    AVCodecContext *avctx = s->avctx;
167
+    int qmin_cid, qmax_cid, qmin, qmax;
168
+    int ret, val;
169
+
170
+    /**
171
+     * requirements
172
+     */
173
+    ret = v4l2_check_b_frame_support(s);
174
+    if (ret)
175
+        return ret;
176
+
177
+    /**
178
+     * settings
179
+     */
180
+    if (avctx->framerate.num || avctx->framerate.den)
181
+        v4l2_set_timeperframe(s, avctx->framerate.num, avctx->framerate.den);
182
+
183
+    /* set ext ctrls */
184
+    v4l2_set_ext_ctrl(s, MPEG_CID(HEADER_MODE), MPEG_VIDEO(HEADER_MODE_SEPARATE), "header mode");
185
+    v4l2_set_ext_ctrl(s, MPEG_CID(BITRATE), avctx->bit_rate, "bit rate");
186
+    v4l2_set_ext_ctrl(s, MPEG_CID(GOP_SIZE), avctx->gop_size, "gop size");
187
+
188
+    av_log(avctx, AV_LOG_DEBUG,
189
+        "Encoder Context: id (%d), profile (%d), frame rate(%d/%d), number b-frames (%d), "
190
+        "gop size (%d), bit rate (%"PRId64"), qmin (%d), qmax (%d)\n",
191
+        avctx->codec_id, avctx->profile, avctx->framerate.num, avctx->framerate.den,
192
+        avctx->max_b_frames, avctx->gop_size, avctx->bit_rate, avctx->qmin, avctx->qmax);
193
+
194
+    switch (avctx->codec_id) {
195
+    case AV_CODEC_ID_H264:
196
+        val = v4l2_h264_profile_from_ff(avctx->profile);
197
+        if (val < 0)
198
+            av_log(avctx, AV_LOG_WARNING, "h264 profile not found\n");
199
+        else
200
+            v4l2_set_ext_ctrl(s, MPEG_CID(H264_PROFILE), val, "h264 profile");
201
+        qmin_cid = MPEG_CID(H264_MIN_QP);
202
+        qmax_cid = MPEG_CID(H264_MAX_QP);
203
+        qmin = 0;
204
+        qmax = 51;
205
+        break;
206
+    case AV_CODEC_ID_MPEG4:
207
+        val = v4l2_mpeg4_profile_from_ff(avctx->profile);
208
+        if (val < 0)
209
+            av_log(avctx, AV_LOG_WARNING, "mpeg4 profile not found\n");
210
+        else
211
+            v4l2_set_ext_ctrl(s, MPEG_CID(MPEG4_PROFILE), val, "mpeg4 profile");
212
+        qmin_cid = MPEG_CID(MPEG4_MIN_QP);
213
+        qmax_cid = MPEG_CID(MPEG4_MAX_QP);
214
+        if (avctx->flags & AV_CODEC_FLAG_QPEL)
215
+            v4l2_set_ext_ctrl(s, MPEG_CID(MPEG4_QPEL), 1, "qpel");
216
+        qmin = 1;
217
+        qmax = 31;
218
+        break;
219
+    case AV_CODEC_ID_H263:
220
+        qmin_cid = MPEG_CID(H263_MIN_QP);
221
+        qmax_cid = MPEG_CID(H263_MAX_QP);
222
+        qmin = 1;
223
+        qmax = 31;
224
+        break;
225
+    case AV_CODEC_ID_VP8:
226
+        qmin_cid = MPEG_CID(VPX_MIN_QP);
227
+        qmax_cid = MPEG_CID(VPX_MAX_QP);
228
+        qmin = 0;
229
+        qmax = 127;
230
+        break;
231
+    case AV_CODEC_ID_VP9:
232
+        qmin_cid = MPEG_CID(VPX_MIN_QP);
233
+        qmax_cid = MPEG_CID(VPX_MAX_QP);
234
+        qmin = 0;
235
+        qmax = 255;
236
+        break;
237
+    default:
238
+        return 0;
239
+    }
240
+
241
+    if (qmin != avctx->qmin || qmax != avctx->qmax)
242
+        av_log(avctx, AV_LOG_WARNING, "Encoder adjusted: qmin (%d), qmax (%d)\n", qmin, qmax);
243
+
244
+    v4l2_set_ext_ctrl(s, qmin_cid, qmin, "minimum video quantizer scale");
245
+    v4l2_set_ext_ctrl(s, qmax_cid, qmax, "maximum video quantizer scale");
246
+
247
+    return 0;
248
+}
249
+
250
+static int v4l2_send_frame(AVCodecContext *avctx, const AVFrame *frame)
251
+{
252
+    V4L2m2mContext *s = avctx->priv_data;
253
+    V4L2Context *const output = &s->output;
254
+
255
+    return ff_v4l2_context_enqueue_frame(output, frame);
256
+}
257
+
258
+static int v4l2_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
259
+{
260
+    V4L2m2mContext *s = avctx->priv_data;
261
+    V4L2Context *const capture = &s->capture;
262
+    V4L2Context *const output = &s->output;
263
+    int ret;
264
+
265
+    if (s->draining)
266
+        goto dequeue;
267
+
268
+    if (!output->streamon) {
269
+        ret = ff_v4l2_context_set_status(output, VIDIOC_STREAMON);
270
+        if (ret) {
271
+            av_log(avctx, AV_LOG_ERROR, "VIDIOC_STREAMON failed on output context\n");
272
+            return ret;
273
+        }
274
+    }
275
+
276
+    if (!capture->streamon) {
277
+        ret = ff_v4l2_context_set_status(capture, VIDIOC_STREAMON);
278
+        if (ret) {
279
+            av_log(avctx, AV_LOG_ERROR, "VIDIOC_STREAMON failed on capture context\n");
280
+            return ret;
281
+        }
282
+    }
283
+
284
+dequeue:
285
+    return ff_v4l2_context_dequeue_packet(capture, avpkt);
286
+}
287
+
288
+static av_cold int v4l2_encode_init(AVCodecContext *avctx)
289
+{
290
+    V4L2m2mContext *s = avctx->priv_data;
291
+    V4L2Context *capture = &s->capture;
292
+    V4L2Context *output = &s->output;
293
+    int ret;
294
+
295
+    /* common settings output/capture */
296
+    output->height = capture->height = avctx->height;
297
+    output->width = capture->width = avctx->width;
298
+
299
+    /* output context */
300
+    output->av_codec_id = AV_CODEC_ID_RAWVIDEO;
301
+    output->av_pix_fmt = avctx->pix_fmt;
302
+
303
+    /* capture context */
304
+    capture->av_codec_id = avctx->codec_id;
305
+    capture->av_pix_fmt = AV_PIX_FMT_NONE;
306
+
307
+    ret = ff_v4l2_m2m_codec_init(avctx);
308
+    if (ret) {
309
+        av_log(avctx, AV_LOG_ERROR, "can't configure encoder\n");
310
+        return ret;
311
+    }
312
+
313
+    return v4l2_prepare_encoder(s);
314
+}
315
+
316
+#define OFFSET(x) offsetof(V4L2m2mContext, x)
317
+#define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
318
+
319
+static const AVOption options[] = {
320
+    V4L_M2M_DEFAULT_OPTS,
321
+    { "num_capture_buffers", "Number of buffers in the capture context",
322
+        OFFSET(capture.num_buffers), AV_OPT_TYPE_INT, {.i64 = 4 }, 4, INT_MAX, FLAGS },
323
+    { NULL },
324
+};
325
+
326
+#define M2MENC(NAME, LONGNAME, CODEC) \
327
+static const AVClass v4l2_m2m_ ## NAME ## _enc_class = {\
328
+    .class_name = #NAME "_v4l2_m2m_encoder",\
329
+    .item_name  = av_default_item_name,\
330
+    .option     = options,\
331
+    .version    = LIBAVUTIL_VERSION_INT,\
332
+};\
333
+\
334
+AVCodec ff_ ## NAME ## _v4l2m2m_encoder = { \
335
+    .name           = #NAME "_v4l2m2m" ,\
336
+    .long_name      = NULL_IF_CONFIG_SMALL("V4L2 mem2mem " LONGNAME " encoder wrapper"),\
337
+    .type           = AVMEDIA_TYPE_VIDEO,\
338
+    .id             = CODEC ,\
339
+    .priv_data_size = sizeof(V4L2m2mContext),\
340
+    .priv_class     = &v4l2_m2m_ ## NAME ##_enc_class,\
341
+    .init           = v4l2_encode_init,\
342
+    .send_frame     = v4l2_send_frame,\
343
+    .receive_packet = v4l2_receive_packet,\
344
+    .close          = ff_v4l2_m2m_codec_end,\
345
+};
346
+
347
+M2MENC(mpeg4,"MPEG4", AV_CODEC_ID_MPEG4);
348
+M2MENC(h263, "H.263", AV_CODEC_ID_H263);
349
+M2MENC(h264, "H.264", AV_CODEC_ID_H264);
350
+M2MENC(hevc, "HEVC",  AV_CODEC_ID_HEVC);
351
+M2MENC(vp8,  "VP8",   AV_CODEC_ID_VP8);
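The option table shared through V4L_M2M_DEFAULT_OPTS means the queue depths can be tuned
per instance. A hedged example of opening the h264_v4l2m2m encoder with both options set
through a dictionary (the values and the example_open_encoder name are illustrative, and
the pixel format must be one the driver actually exposes):

#include "libavcodec/avcodec.h"
#include "libavutil/dict.h"
#include "libavutil/pixfmt.h"

/* Sketch: open the V4L2 m2m H.264 encoder with custom buffer counts. */
static AVCodecContext *example_open_encoder(int width, int height)
{
    AVCodec *codec = avcodec_find_encoder_by_name("h264_v4l2m2m");
    AVCodecContext *avctx = codec ? avcodec_alloc_context3(codec) : NULL;
    AVDictionary *opts = NULL;

    if (!avctx)
        return NULL;

    avctx->width     = width;
    avctx->height    = height;
    avctx->pix_fmt   = AV_PIX_FMT_NV12;            /* assumed raw input format */
    avctx->time_base = (AVRational){ 1, 30 };
    avctx->framerate = (AVRational){ 30, 1 };

    av_dict_set(&opts, "num_output_buffers",  "16", 0);  /* raw frames queued to the driver */
    av_dict_set(&opts, "num_capture_buffers", "8",  0);  /* coded packets queued back */

    if (avcodec_open2(avctx, codec, &opts) < 0)
        avcodec_free_context(&avctx);              /* sets avctx to NULL on failure */

    av_dict_free(&opts);
    return avctx;
}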