Browse code

lavfi: Add coreimage filter for GPU based image filtering on OSX.

Thilo Borgmann authored on 2016/03/20 00:28:40
Showing 8 changed files
... ...
@@ -15,6 +15,7 @@ version <next>:
15 15
 - VC-2 HQ RTP payload format (draft v1) depacketizer
16 16
 - AudioToolbox audio decoders
17 17
 - AudioToolbox audio encoders
18
+- coreimage filter (GPU based image filtering on OSX)
18 19
 
19 20
 
20 21
 version 3.0:
... ...
@@ -370,6 +370,7 @@ Filters:
370 370
   vf_colorbalance.c                     Paul B Mahol
371 371
   vf_colorkey.c                         Timo Rothenpieler
372 372
   vf_colorlevels.c                      Paul B Mahol
373
+  vf_coreimage.m                        Thilo Borgmann
373 374
   vf_deband.c                           Paul B Mahol
374 375
   vf_dejudder.c                         Nicholas Robbins
375 376
   vf_delogo.c                           Jean Delvare (CC <jdelvare@suse.com>)
... ...
@@ -5287,6 +5287,8 @@ frei0r_filter_extralibs='$ldl'
5287 5287
 frei0r_src_filter_extralibs='$ldl'
5288 5288
 ladspa_filter_extralibs='$ldl'
5289 5289
 nvenc_encoder_extralibs='$ldl'
5290
+coreimage_filter_extralibs="-framework QuartzCore -framework AppKit -framework OpenGL"
5291
+coreimagesrc_filter_extralibs="-framework QuartzCore -framework AppKit -framework OpenGL"
5290 5292
 
5291 5293
 if ! disabled network; then
5292 5294
     check_func getaddrinfo $network_extralibs
... ...
@@ -5516,6 +5518,8 @@ enabled avisynth          && { { check_lib2 "windows.h" LoadLibrary; } ||
5516 5516
                                die "ERROR: LoadLibrary/dlopen not found for avisynth"; }
5517 5517
 enabled cuda              && check_lib cuda.h cuInit -lcuda
5518 5518
 enabled chromaprint       && require chromaprint chromaprint.h chromaprint_get_version -lchromaprint
5519
+enabled coreimage_filter  && { check_header_objcc QuartzCore/CoreImage.h || disable coreimage_filter; }
5520
+enabled coreimagesrc_filter && { check_header_objcc QuartzCore/CoreImage.h || disable coreimagesrc_filter; }
5519 5521
 enabled decklink          && { check_header DeckLinkAPI.h || die "ERROR: DeckLinkAPI.h header not found"; }
5520 5522
 enabled frei0r            && { check_header frei0r.h || die "ERROR: frei0r.h header not found"; }
5521 5523
 enabled gmp               && require2 gmp gmp.h mpz_export -lgmp
... ...
@@ -4955,6 +4955,111 @@ convolution="-2 -1 0 -1 1 1 0 1 2:-2 -1 0 -1 1 1 0 1 2:-2 -1 0 -1 1 1 0 1 2:-2 -
4955 4955
 Copy the input source unchanged to the output. This is mainly useful for
4956 4956
 testing purposes.
4957 4957
 
4958
+@anchor{coreimage}
4959
+@section coreimage
4960
+Video filtering on GPU using Apple's CoreImage API on OSX.
4961
+
4962
+Hardware acceleration is based on an OpenGL context. Usually, this means it is
4963
+processed by video hardware. However, software-based OpenGL implementations
4964
+exist which means there is no guarantee for hardware processing. It depends on
4965
+the respective OSX version.
4966
+
4967
+There are many filters and image generators provided by Apple that come with a
4968
+large variety of options. The filter has to be referenced by its name along
4969
+with its options.
4970
+
4971
+The coreimage filter accepts the following options:
4972
+@table @option
4973
+@item list_filters
4974
+List all available filters and generators along with all their respective
4975
+options as well as possible minimum and maximum values along with the default
4976
+values.
4977
+@example
4978
+list_filters=true
4979
+@end example
4980
+
4981
+@item filter
4982
+Specify all filters by their respective name and options.
4983
+Use @var{list_filters} to determine all valid filter names and options.
4984
+Numerical options are specified by a float value and are automatically clamped
4985
+to their respective value range.  Vector and color options have to be specified
4986
+by a list of space separated float values. Character escaping has to be done.
4987
+A special option name @code{default} is available to use default options for a
4988
+filter.
4989
+
4990
+It is required to specify either @code{default} or at least one of the filter options.
4991
+All omitted options are used with their default values.
4992
+The syntax of the filter string is as follows:
4993
+@example
4994
+filter=<NAME>@@<OPTION>=<VALUE>[@@<OPTION>=<VALUE>][@@...][#<NAME>@@<OPTION>=<VALUE>[@@<OPTION>=<VALUE>][@@...]][#...]
4995
+@end example
4996
+
4997
+@item output_rect
4998
+Specify a rectangle where the output of the filter chain is copied into the
4999
+input image. It is given by a list of space separated float values:
5000
+@example
5001
+output_rect=x\ y\ width\ height
5002
+@end example
5003
+If not given, the output rectangle equals the dimensions of the input image.
5004
+The output rectangle is automatically cropped at the borders of the input
5005
+image. Negative values are valid for each component.
5006
+@example
5007
+output_rect=25\ 25\ 100\ 100
5008
+@end example
5009
+@end table
5010
+
5011
+Several filters can be chained for successive processing without GPU-HOST
5012
+transfers allowing for fast processing of complex filter chains.
5013
+Currently, only filters with zero (generators) or exactly one (filters) input
5014
+image and one output image are supported. Also, transition filters are not yet
5015
+usable as intended.
5016
+
5017
+Some filters generate output images with additional padding depending on the
5018
+respective filter kernel. The padding is automatically removed to ensure the
5019
+filter output has the same size as the input image.
5020
+
5021
+For image generators, the size of the output image is determined by the
5022
+previous output image of the filter chain or the input image of the whole
5023
+filterchain, respectively. The generators do not use the pixel information of
5024
+this image to generate their output. However, the generated output is
5025
+blended onto this image, resulting in partial or complete coverage of the
5026
+output image.
5027
+
5028
+The @ref{coreimagesrc} video source can be used for generating input images
5029
+which are directly fed into the filter chain. By using it, providing input
5030
+images by another video source or an input video is not required.
5031
+
5032
+@subsection Examples
5033
+
5034
+@itemize
5035
+
5036
+@item
5037
+List all filters available:
5038
+@example
5039
+coreimage=list_filters=true
5040
+@end example
5041
+
5042
+@item
5043
+Use the CIBoxBlur filter with default options to blur an image:
5044
+@example
5045
+coreimage=filter=CIBoxBlur@@default
5046
+@end example
5047
+
5048
+@item
5049
+Use a filter chain with CISepiaTone at default values and CIVignetteEffect with
5050
+its center at 100x100 and a radius of 50 pixels:
5051
+@example
5052
+coreimage=filter=CISepiaTone@@default#CIVignetteEffect@@inputCenter=100\ 100@@inputRadius=50
5053
+@end example
5054
+
5055
+@item
5056
+Use nullsrc and CIQRCodeGenerator to create a QR code for the FFmpeg homepage,
5057
+given as complete and escaped command-line for Apple's standard bash shell:
5058
+@example
5059
+ffmpeg -f lavfi -i nullsrc=s=100x100,coreimage=filter=CIQRCodeGenerator@@inputMessage=https\\\\\://FFmpeg.org/@@inputCorrectionLevel=H -frames:v 1 QRCode.png
5060
+@end example
5061
+@end itemize
5062
+
4958 5063
 @section crop
4959 5064
 
4960 5065
 Crop the input video to given dimensions.
... ...
@@ -13798,6 +13903,67 @@ cellauto=p='@@@@ @@ @@@@':s=100x400:full=0:rule=18
13798 13798
 
13799 13799
 @end itemize
13800 13800
 
13801
+@anchor{coreimagesrc}
13802
+@section coreimagesrc
13803
+Video source generated on GPU using Apple's CoreImage API on OSX.
13804
+
13805
+This video source is a specialized version of the @ref{coreimage} video filter.
13806
+Use a core image generator at the beginning of the applied filterchain to
13807
+generate the content.
13808
+
13809
+The coreimagesrc video source accepts the following options:
13810
+@table @option
13811
+@item list_generators
13812
+List all available generators along with all their respective options as well as
13813
+possible minimum and maximum values along with the default values.
13814
+@example
13815
+list_generators=true
13816
+@end example
13817
+
13818
+@item size, s
13819
+Specify the size of the sourced video. For the syntax of this option, check the
13820
+@ref{video size syntax,,"Video size" section in the ffmpeg-utils manual,ffmpeg-utils}.
13821
+The default value is @code{320x240}.
13822
+
13823
+@item rate, r
13824
+Specify the frame rate of the sourced video, as the number of frames
13825
+generated per second. It has to be a string in the format
13826
+@var{frame_rate_num}/@var{frame_rate_den}, an integer number, a floating point
13827
+number or a valid video frame rate abbreviation. The default value is
13828
+"25".
13829
+
13830
+@item sar
13831
+Set the sample aspect ratio of the sourced video.
13832
+
13833
+@item duration, d
13834
+Set the duration of the sourced video. See
13835
+@ref{time duration syntax,,the Time duration section in the ffmpeg-utils(1) manual,ffmpeg-utils}
13836
+for the accepted syntax.
13837
+
13838
+If not specified, or the expressed duration is negative, the video is
13839
+supposed to be generated forever.
13840
+@end table
13841
+
13842
+Additionally, all options of the @ref{coreimage} video filter are accepted.
13843
+A complete filterchain can be used for further processing of the
13844
+generated input without GPU-HOST transfer. See @ref{coreimage} documentation
13845
+and examples for details.
13846
+
13847
+@subsection Examples
13848
+
13849
+@itemize
13850
+
13851
+@item
13852
+Use CIQRCodeGenerator to create a QR code for the FFmpeg homepage,
13853
+given as complete and escaped command-line for Apple's standard bash shell:
13854
+@example
13855
+ffmpeg -f lavfi -i coreimagesrc=s=100x100:filter=CIQRCodeGenerator@@inputMessage=https\\\\\://FFmpeg.org/@@inputCorrectionLevel=H -frames:v 1 QRCode.png
13856
+@end example
13857
+This example is equivalent to the QRCode example of @ref{coreimage} without the
13858
+need for a nullsrc video source.
13859
+@end itemize
13860
+
13861
+
13801 13862
 @section mandelbrot
13802 13863
 
13803 13864
 Generate a Mandelbrot set fractal, and progressively zoom towards the
... ...
@@ -133,6 +133,7 @@ OBJS-$(CONFIG_COLORLEVELS_FILTER)            += vf_colorlevels.o
133 133
 OBJS-$(CONFIG_COLORMATRIX_FILTER)            += vf_colormatrix.o
134 134
 OBJS-$(CONFIG_CONVOLUTION_FILTER)            += vf_convolution.o
135 135
 OBJS-$(CONFIG_COPY_FILTER)                   += vf_copy.o
136
+OBJS-$(CONFIG_COREIMAGE_FILTER)              += vf_coreimage.o
136 137
 OBJS-$(CONFIG_COVER_RECT_FILTER)             += vf_cover_rect.o lavfutils.o
137 138
 OBJS-$(CONFIG_CROP_FILTER)                   += vf_crop.o
138 139
 OBJS-$(CONFIG_CROPDETECT_FILTER)             += vf_cropdetect.o
... ...
@@ -282,6 +283,7 @@ OBJS-$(CONFIG_ALLRGB_FILTER)                 += vsrc_testsrc.o
282 282
 OBJS-$(CONFIG_ALLYUV_FILTER)                 += vsrc_testsrc.o
283 283
 OBJS-$(CONFIG_CELLAUTO_FILTER)               += vsrc_cellauto.o
284 284
 OBJS-$(CONFIG_COLOR_FILTER)                  += vsrc_testsrc.o
285
+OBJS-$(CONFIG_COREIMAGESRC_FILTER)           += vf_coreimage.o
285 286
 OBJS-$(CONFIG_FREI0R_SRC_FILTER)             += vf_frei0r.o
286 287
 OBJS-$(CONFIG_HALDCLUTSRC_FILTER)            += vsrc_testsrc.o
287 288
 OBJS-$(CONFIG_LIFE_FILTER)                   += vsrc_life.o
... ...
@@ -154,6 +154,7 @@ void avfilter_register_all(void)
154 154
     REGISTER_FILTER(COLORMATRIX,    colormatrix,    vf);
155 155
     REGISTER_FILTER(CONVOLUTION,    convolution,    vf);
156 156
     REGISTER_FILTER(COPY,           copy,           vf);
157
+    REGISTER_FILTER(COREIMAGE,      coreimage,      vf);
157 158
     REGISTER_FILTER(COVER_RECT,     cover_rect,     vf);
158 159
     REGISTER_FILTER(CROP,           crop,           vf);
159 160
     REGISTER_FILTER(CROPDETECT,     cropdetect,     vf);
... ...
@@ -302,6 +303,7 @@ void avfilter_register_all(void)
302 302
     REGISTER_FILTER(ALLYUV,         allyuv,         vsrc);
303 303
     REGISTER_FILTER(CELLAUTO,       cellauto,       vsrc);
304 304
     REGISTER_FILTER(COLOR,          color,          vsrc);
305
+    REGISTER_FILTER(COREIMAGESRC,   coreimagesrc,   vsrc);
305 306
     REGISTER_FILTER(FREI0R,         frei0r_src,     vsrc);
306 307
     REGISTER_FILTER(HALDCLUTSRC,    haldclutsrc,    vsrc);
307 308
     REGISTER_FILTER(LIFE,           life,           vsrc);
... ...
@@ -30,7 +30,7 @@
30 30
 #include "libavutil/version.h"
31 31
 
32 32
 #define LIBAVFILTER_VERSION_MAJOR   6
33
-#define LIBAVFILTER_VERSION_MINOR  39
33
+#define LIBAVFILTER_VERSION_MINOR  40
34 34
 #define LIBAVFILTER_VERSION_MICRO 102
35 35
 
36 36
 #define LIBAVFILTER_VERSION_INT AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, \
37 37
new file mode 100644
... ...
@@ -0,0 +1,688 @@
0
+/*
1
+ * Copyright (c) 2016 Thilo Borgmann
2
+ *
3
+ * This file is part of FFmpeg.
4
+ *
5
+ * FFmpeg is free software; you can redistribute it and/or
6
+ * modify it under the terms of the GNU Lesser General Public
7
+ * License as published by the Free Software Foundation; either
8
+ * version 2.1 of the License, or (at your option) any later version.
9
+ *
10
+ * FFmpeg is distributed in the hope that it will be useful,
11
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
12
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13
+ * Lesser General Public License for more details.
14
+ *
15
+ * You should have received a copy of the GNU Lesser General Public
16
+ * License along with FFmpeg; if not, write to the Free Software
17
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18
+ */
19
+
20
+/**
21
+ * @file
22
+ * Video processing based on Apple's CoreImage API
23
+ */
24
+
25
+#import <QuartzCore/CoreImage.h>
26
+#import <AppKit/AppKit.h>
27
+
28
+#include "avfilter.h"
29
+#include "formats.h"
30
+#include "internal.h"
31
+#include "video.h"
32
+#include "libavutil/internal.h"
33
+#include "libavutil/opt.h"
34
+#include "libavutil/pixdesc.h"
35
+
36
/** Per-instance state shared by the coreimage filter and the coreimagesrc source. */
typedef struct CoreImageContext {
    const AVClass   *class;

    int             is_video_source;    ///< filter is used as video source

    int             w, h;               ///< video size
    AVRational      sar;                ///< sample aspect ratio
    AVRational      frame_rate;         ///< video frame rate
    AVRational      time_base;          ///< stream time base
    int64_t         duration;           ///< duration expressed in microseconds
    int64_t         pts;                ///< increasing presentation time stamp
    AVFrame         *picref;            ///< cached reference containing the painted picture

    CFTypeRef       glctx;              ///< OpenGL context
    CGContextRef    cgctx;              ///< Bitmap context for image copy
    CFTypeRef       input_image;        ///< Input image container for passing into Core Image API
    CGColorSpaceRef color_space;        ///< Common color space for input image and cgcontext
    int             bits_per_component; ///< Shared bpc for input-output operation

    char            *filter_string;     ///< The complete user provided filter definition
    CFTypeRef       *filters;           ///< CIFilter object for all requested filters
    int             num_filters;        ///< Amount of filters in *filters

    char            *output_rect;       ///< Rectangle to be filled with filter input
    int             list_filters;       ///< Option used to list all available filters including generators
    int             list_generators;    ///< Option used to list all available generators
} CoreImageContext;
63
+
64
/** Propagate the source parameters (size, SAR, rate, time base) to the
 *  output link and derive the bits-per-component from the negotiated
 *  pixel format.
 */
static int config_output(AVFilterLink *link)
{
    CoreImageContext *ctx          = link->src->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);

    link->w                   = ctx->w;
    link->h                   = ctx->h;
    link->sample_aspect_ratio = ctx->sar;
    link->frame_rate          = ctx->frame_rate;
    link->time_base           = ctx->time_base;

    ctx->bits_per_component = av_get_bits_per_pixel(desc) / desc->nb_components;

    return 0;
}
79
+
80
/** Determine image properties from input link of filter chain.
 *  Only the bits-per-component of the negotiated pixel format is needed
 *  later for the bitmap context.
 */
static int config_input(AVFilterLink *link)
{
    CoreImageContext *ctx = link->dst->priv;
    const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(link->format);

    ctx->bits_per_component = av_get_bits_per_pixel(pix_desc) / pix_desc->nb_components;

    return 0;
}
90
+
91
/** Print a list of all available filters including options and respective value ranges and defaults.
 */
static void list_filters(CoreImageContext *ctx)
{
    // querying filters and attributes
    NSArray *filter_categories = nil;

    // restrict the query to generators only when just list_generators was given
    if (ctx->list_generators && !ctx->list_filters) {
        filter_categories = [NSArray arrayWithObjects:kCICategoryGenerator, nil];
    }

    // nil categories means "all filters" for filterNamesInCategories:
    NSArray *filter_names = [CIFilter filterNamesInCategories:filter_categories];
    NSEnumerator *filters = [filter_names objectEnumerator];

    NSString *filter_name;
    while (filter_name = [filters nextObject]) {
        av_log(ctx, AV_LOG_INFO, "Filter: %s\n", [filter_name UTF8String]);
        NSString *input;

        CIFilter *filter             = [CIFilter filterWithName:filter_name];
        NSDictionary *filter_attribs = [filter attributes]; // <nsstring, id>
        NSArray      *filter_inputs  = [filter inputKeys];  // <nsstring>

        for (input in filter_inputs) {
            NSDictionary *input_attribs = [filter_attribs valueForKey:input];
            NSString *input_class       = [input_attribs valueForKey:kCIAttributeClass];
            if ([input_class isEqualToString:@"NSNumber"]) {
                // numeric options additionally report their slider range and default value
                NSNumber *value_default = [input_attribs valueForKey:kCIAttributeDefault];
                NSNumber *value_min     = [input_attribs valueForKey:kCIAttributeSliderMin];
                NSNumber *value_max     = [input_attribs valueForKey:kCIAttributeSliderMax];

                av_log(ctx, AV_LOG_INFO, "\tOption: %s\t[%s]\t[%s %s][%s]\n",
                    [input UTF8String],
                    [input_class UTF8String],
                    [[value_min stringValue] UTF8String],
                    [[value_max stringValue] UTF8String],
                    [[value_default stringValue] UTF8String]);
            } else {
                // non-numeric options: only name and attribute class are printed
                av_log(ctx, AV_LOG_INFO, "\tOption: %s\t[%s]\n",
                    [input UTF8String],
                    [input_class UTF8String]);
            }
        }
    }
}
136
+
137
/** Restrict both the input and the output of the filter to packed ARGB,
 *  matching the kCIFormatARGB8 layout used for the Core Image round trip.
 */
static int query_formats(AVFilterContext *fctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_ARGB,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *formats = ff_make_format_list(pix_fmts);
    int ret;

    if (!formats) {
        return AVERROR(ENOMEM);
    }

    ret = ff_formats_ref(formats, &fctx->inputs[0]->out_formats);
    if (ret < 0) {
        return ret;
    }

    return ff_formats_ref(formats, &fctx->outputs[0]->in_formats);
}
158
+
159
/** Video-source variant of query_formats(): there is no input link, so
 *  only the output is restricted to packed ARGB.
 */
static int query_formats_src(AVFilterContext *fctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_ARGB,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *formats = ff_make_format_list(pix_fmts);

    if (!formats) {
        return AVERROR(ENOMEM);
    }

    return ff_formats_ref(formats, &fctx->outputs[0]->in_formats);
}
179
+
180
/** Apply all valid filters successively to the input image.
 *  The final output image is copied from the GPU by "drawing" using a bitmap context
 *  into frame->data[], then the frame is passed on via ff_filter_frame().
 *
 *  On success the frame's ownership passes to ff_filter_frame(); on error the
 *  caller keeps ownership of the frame.
 */
static int apply_filter(CoreImageContext *ctx, AVFilterLink *link, AVFrame *frame)
{
    int i;

    // (re-)initialize input image
    const CGSize frame_size = {
        frame->width,
        frame->height
    };

    NSData *data = [NSData dataWithBytesNoCopy:frame->data[0]
                           length:frame->height*frame->linesize[0]
                           freeWhenDone:NO];

    CIImage *ret = [(__bridge CIImage*)ctx->input_image initWithBitmapData:data
                                                        bytesPerRow:frame->linesize[0]
                                                        size:frame_size
                                                        format:kCIFormatARGB8
                                                        colorSpace:ctx->color_space]; //kCGColorSpaceGenericRGB
    if (!ret) {
        av_log(ctx, AV_LOG_ERROR, "Input image could not be initialized.\n");
        return AVERROR_EXTERNAL;
    }

    CIFilter *filter       = NULL;
    CIImage *filter_input  = (__bridge CIImage*)ctx->input_image;
    CIImage *filter_output = NULL;

    // successively apply all filters
    for (i = 0; i < ctx->num_filters; i++) {
        if (i) {
            // set filter input to previous filter output
            filter_input    = [(__bridge CIImage*)ctx->filters[i-1] valueForKey:kCIOutputImageKey];
            CGRect out_rect = [filter_input extent];
            if (out_rect.size.width > frame->width || out_rect.size.height > frame->height) {
                // do not keep padded image regions after filtering
                out_rect.origin.x    = 0.0f;
                out_rect.origin.y    = 0.0f;
                out_rect.size.width  = frame->width;
                out_rect.size.height = frame->height;
            }
            filter_input = [filter_input imageByCroppingToRect:out_rect];
        }

        filter = (__bridge CIFilter*)ctx->filters[i];

        // do not set input image for the first filter if used as video source
        if (!ctx->is_video_source || i) {
            @try {
                [filter setValue:filter_input forKey:kCIInputImageKey];
            } @catch (NSException *exception) {
                if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                    av_log(ctx, AV_LOG_ERROR, "An error occurred: %s.", [exception.reason UTF8String]);
                    return AVERROR_EXTERNAL;
                } else {
                    // generators legitimately have no input image key
                    av_log(ctx, AV_LOG_WARNING, "Selected filter does not accept an input image.\n");
                }
            }
        }
    }

    // get output of last filter
    filter_output = [filter valueForKey:kCIOutputImageKey];

    if (!filter_output) {
        av_log(ctx, AV_LOG_ERROR, "Filter output not available.\n");
        return AVERROR_EXTERNAL;
    }

    // do not keep padded image regions after filtering
    CGRect out_rect = [filter_output extent];
    if (out_rect.size.width > frame->width || out_rect.size.height > frame->height) {
        av_log(ctx, AV_LOG_DEBUG, "Cropping output image.\n");
        out_rect.origin.x    = 0.0f;
        out_rect.origin.y    = 0.0f;
        out_rect.size.width  = frame->width;
        out_rect.size.height = frame->height;
    }

    CGImageRef out = [(__bridge CIContext*)ctx->glctx createCGImage:filter_output
                                                      fromRect:out_rect];

    if (!out) {
        // FIX: the original only logged here and fell through, dereferencing
        // a NULL CGImageRef in CGImageGetWidth() below — fail early instead.
        av_log(ctx, AV_LOG_ERROR, "Cannot create valid output image.\n");
        return AVERROR_EXTERNAL;
    }

    // create bitmap context on the fly for rendering into current frame->data[]
    if (ctx->cgctx) {
        CGContextRelease(ctx->cgctx);
        ctx->cgctx = NULL;
    }
    size_t out_width  = CGImageGetWidth(out);
    size_t out_height = CGImageGetHeight(out);

    if (out_width > frame->width || out_height > frame->height) { // this might result in segfault
        av_log(ctx, AV_LOG_WARNING, "Output image has unexpected size: %lux%lu (expected: %ix%i). This may crash...\n",
               out_width, out_height, frame->width, frame->height);
    }
    ctx->cgctx = CGBitmapContextCreate(frame->data[0],
                                       frame->width,
                                       frame->height,
                                       ctx->bits_per_component,
                                       frame->linesize[0],
                                       ctx->color_space,
                                       (uint32_t)kCGImageAlphaPremultipliedFirst); // ARGB
    if (!ctx->cgctx) {
        av_log(ctx, AV_LOG_ERROR, "CGBitmap context cannot be created.\n");
        CGImageRelease(out); // FIX: do not leak the rendered CGImage on error
        return AVERROR_EXTERNAL;
    }

    // copy ("draw") the output image into the frame data
    CGRect rect = {{0,0},{frame->width, frame->height}};
    if (ctx->output_rect) {
        @try {
            NSString *tmp_string = [NSString stringWithUTF8String:ctx->output_rect];
            NSRect tmp           = NSRectFromString(tmp_string);
            rect                 = NSRectToCGRect(tmp);
        } @catch (NSException *exception) {
            av_log(ctx, AV_LOG_ERROR, "An error occurred: %s.", [exception.reason UTF8String]);
            CGImageRelease(out); // FIX: do not leak the rendered CGImage on error
            return AVERROR_EXTERNAL;
        }
        if (rect.size.width == 0.0f) {
            av_log(ctx, AV_LOG_WARNING, "Width of output rect is zero.\n");
        }
        if (rect.size.height == 0.0f) {
            av_log(ctx, AV_LOG_WARNING, "Height of output rect is zero.\n");
        }
    }

    CGContextDrawImage(ctx->cgctx, rect, out);
    CGImageRelease(out); // FIX: createCGImage follows the create rule; was leaked once per frame

    return ff_filter_frame(link, frame);
}
313
+
314
/** Filter-frame callback for the plain video filter: run the configured
 *  Core Image filter chain on the incoming frame (see apply_filter()).
 */
static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    AVFilterContext *fctx = link->dst;

    return apply_filter(fctx->priv, fctx->outputs[0], frame);
}
321
+
322
/** Produce one frame for the video-source variant.
 *  A blank picture is cached on first use, cloned per request, stamped with
 *  an increasing pts, and pushed through the filter chain.
 */
static int request_frame(AVFilterLink *link)
{
    CoreImageContext *ctx = link->src->priv;
    AVFrame *out;

    // signal EOF once the configured duration has elapsed (negative = forever)
    if (ctx->duration >= 0 &&
        av_rescale_q(ctx->pts, ctx->time_base, AV_TIME_BASE_Q) >= ctx->duration) {
        return AVERROR_EOF;
    }

    // lazily allocate the cached picture reference used as generator canvas
    if (!ctx->picref) {
        ctx->picref = ff_get_video_buffer(link, ctx->w, ctx->h);
        if (!ctx->picref) {
            return AVERROR(ENOMEM);
        }
    }

    out = av_frame_clone(ctx->picref);
    if (!out) {
        return AVERROR(ENOMEM);
    }

    out->pts                 = ctx->pts++;
    out->key_frame           = 1;
    out->interlaced_frame    = 0;
    out->pict_type           = AV_PICTURE_TYPE_I;
    out->sample_aspect_ratio = ctx->sar;

    return apply_filter(ctx, link, out);
}
354
+
355
/** Set an option of the given filter to the provided key-value pair.
 *
 *  The string value is converted to the native class the filter attribute
 *  expects (NSNumber, CIVector, CIColor, NSString or NSData). Numeric values
 *  are clamped to the slider range advertised by the filter. Unknown keys and
 *  unsupported attribute classes are skipped with a warning.
 */
static void set_option(CoreImageContext *ctx, CIFilter *filter, const char *key, const char *value)
{
    NSString *input_key = [NSString stringWithUTF8String:key];
    NSString *input_val = [NSString stringWithUTF8String:value];

    NSDictionary *filter_attribs = [filter attributes]; // <nsstring, id>
    NSDictionary *input_attribs  = [filter_attribs valueForKey:input_key];

    // messaging nil below is safe in ObjC; the early return handles unknown keys
    NSString *input_class = [input_attribs valueForKey:kCIAttributeClass];
    NSString *input_type  = [input_attribs valueForKey:kCIAttributeType];

    if (!input_attribs) {
        av_log(ctx, AV_LOG_WARNING, "Skipping unknown option: \"%s\".\n",
               [input_key UTF8String]); // [[filter name] UTF8String]) not currently defined...
        return;
    }

    // FIX: input_attribs is guaranteed non-nil here, so the original's
    // `input_attribs ? ... : -1` ternary (which passed -1 to %lu) was dead code.
    av_log(ctx, AV_LOG_DEBUG, "key: %s, val: %s, #attribs: %lu, class: %s, type: %s\n",
           [input_key UTF8String],
           [input_val UTF8String],
           (unsigned long)[input_attribs count],
           [input_class UTF8String],
           [input_type UTF8String]);

    if ([input_class isEqualToString:@"NSNumber"]) {
        float input          = input_val.floatValue;
        NSNumber *max_value  = [input_attribs valueForKey:kCIAttributeSliderMax];
        NSNumber *min_value  = [input_attribs valueForKey:kCIAttributeSliderMin];
        NSNumber *used_value = nil;

#define CLAMP_WARNING do {     \
av_log(ctx, AV_LOG_WARNING, "Value of \"%f\" for option \"%s\" is out of range [%f %f], clamping to \"%f\".\n", \
       input,                  \
       [input_key UTF8String], \
       min_value.floatValue,   \
       max_value.floatValue,   \
       used_value.floatValue); \
} while(0)
        if (input > max_value.floatValue) {
            used_value = max_value;
            CLAMP_WARNING;
        } else if (input < min_value.floatValue) {
            used_value = min_value;
            CLAMP_WARNING;
        } else {
            used_value = [NSNumber numberWithFloat:input];
        }

        [filter setValue:used_value forKey:input_key];
    } else if ([input_class isEqualToString:@"CIVector"]) {
        CIVector *input = [CIVector vectorWithString:input_val];

        if (!input) {
            // FIX: original warning misspelled the class name as "CIVctor"
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid CIVector description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else if ([input_class isEqualToString:@"CIColor"]) {
        CIColor *input = [CIColor colorWithString:input_val];

        if (!input) {
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid CIColor description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else if ([input_class isEqualToString:@"NSString"]) {
        // plain string attribute: set the value as-is, no conversion needed
        [filter setValue:input_val forKey:input_key];
    } else if ([input_class isEqualToString:@"NSData"]) {
        // convert the option string to raw bytes using latin1 encoding
        NSData *input = [NSData dataWithBytes:(const void*)[input_val cStringUsingEncoding:NSISOLatin1StringEncoding]
                                length:[input_val lengthOfBytesUsingEncoding:NSISOLatin1StringEncoding]];

        if (!input) {
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid NSData description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else {
        av_log(ctx, AV_LOG_WARNING, "Skipping unsupported option class: \"%s\".\n",
               [input_class UTF8String]);
        avpriv_report_missing_feature(ctx, "Handling of some option classes");
        return;
    }
}
446
+
447
/** Create a filter object by a given name and set all options to defaults.
 *  Overwrite any option given by the user to the provided value in filter_options.
 */
static CIFilter* create_filter(CoreImageContext *ctx, const char *filter_name, AVDictionary *filter_options)
{
    // instantiate the named CoreImage filter (nil if the name is unknown;
    // the caller is responsible for checking the result)
    CIFilter *filter = [CIFilter filterWithName:[NSString stringWithUTF8String:filter_name]];

    // initialize every filter input with its documented default value
    [filter setDefaults];

    // then overwrite the defaults with any user-supplied option values
    if (filter_options) {
        AVDictionaryEntry *option = NULL;
        while ((option = av_dict_get(filter_options, "", option, AV_DICT_IGNORE_SUFFIX))) {
            set_option(ctx, filter, option->key, option->value);
        }
    }

    return filter;
}
468
+
469
/** Common initializer for the coreimage filter and the coreimagesrc source.
 *
 *  Parses the user-supplied filter string, instantiates each CIFilter with
 *  its options, and creates the GPU-backed CIContext used for rendering.
 *
 *  Returns 0 on success, AVERROR_EXIT if only a capability listing was
 *  requested, or a negative AVERROR code on failure.
 */
static av_cold int init(AVFilterContext *fctx)
{
    CoreImageContext *ctx     = fctx->priv;
    AVDictionary *filter_dict = NULL;
    AVDictionaryEntry *f      = NULL;
    AVDictionaryEntry *o      = NULL;
    int ret;
    int i;

    if (ctx->list_filters || ctx->list_generators) {
        list_filters(ctx);
        return AVERROR_EXIT;
    }

    if (!ctx->filter_string) {
        av_log(ctx, AV_LOG_ERROR, "No filters specified.\n");
        return AVERROR(EINVAL);
    }

    // parse filter string (filter=name@opt=val@opt2=val2#name2@opt3=val3) for filters separated by #
    av_log(ctx, AV_LOG_DEBUG, "Filter_string: %s\n", ctx->filter_string);
    ret = av_dict_parse_string(&filter_dict, ctx->filter_string, "@", "#", AV_DICT_MULTIKEY); // parse filter_name:all_filter_options
    if (ret) {
        av_dict_free(&filter_dict); // may be partially populated on failure
        av_log(ctx, AV_LOG_ERROR, "Parsing of filters failed.\n");
        return AVERROR(EIO);
    }
    ctx->num_filters = av_dict_count(filter_dict);
    av_log(ctx, AV_LOG_DEBUG, "Filter count: %i\n", ctx->num_filters);

    // allocate CIFilter array
    ctx->filters = av_mallocz_array(ctx->num_filters, sizeof(CIFilter*));
    if (!ctx->filters) {
        av_log(ctx, AV_LOG_ERROR, "Could not allocate filter array.\n");
        av_dict_free(&filter_dict);
        return AVERROR(ENOMEM);
    }

    // parse filters for option key-value pairs (opt=val@opt2=val2) separated by @
    i = 0;
    while ((f = av_dict_get(filter_dict, "", f, AV_DICT_IGNORE_SUFFIX))) {
        AVDictionary *filter_options = NULL;

        if (strncmp(f->value, "default", 7)) { // not default
            ret = av_dict_parse_string(&filter_options, f->value, "=", "@", 0); // parse option_name:option_value
            if (ret) {
                av_log(ctx, AV_LOG_ERROR, "Parsing of filter options for \"%s\" failed.\n", f->key);
                av_dict_free(&filter_options);
                av_dict_free(&filter_dict);
                return AVERROR(EIO);
            }
        }

        if (av_log_get_level() >= AV_LOG_DEBUG) {
            av_log(ctx, AV_LOG_DEBUG, "Creating filter %i: \"%s\":\n", i, f->key);
            if (!filter_options) {
                av_log(ctx, AV_LOG_DEBUG, "\tusing default options\n");
            } else {
                while ((o = av_dict_get(filter_options, "", o, AV_DICT_IGNORE_SUFFIX))) {
                    av_log(ctx, AV_LOG_DEBUG, "\t%s: %s\n", o->key, o->value);
                }
            }
        }

        // create_filter() copies all option values into the CIFilter, so the
        // options dictionary can be released right away (the original leaked it)
        ctx->filters[i] = CFBridgingRetain(create_filter(ctx, f->key, filter_options));
        av_dict_free(&filter_options);
        if (!ctx->filters[i]) {
            av_log(ctx, AV_LOG_ERROR, "Could not create filter \"%s\".\n", f->key);
            av_dict_free(&filter_dict);
            return AVERROR(EINVAL);
        }

        i++;
    }
    av_dict_free(&filter_dict); // all filters created; dictionary no longer needed

    // create GPU context on OSX
    const NSOpenGLPixelFormatAttribute attr[] = {
        NSOpenGLPFAAccelerated,
        NSOpenGLPFANoRecovery,
        NSOpenGLPFAColorSize, 32,
        0
    };

    // attr decays to a pointer; cast only drops the const qualifier
    NSOpenGLPixelFormat *pixel_format = [[NSOpenGLPixelFormat alloc] initWithAttributes:(NSOpenGLPixelFormatAttribute *)attr];
    ctx->color_space                  = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
    ctx->glctx                        = CFBridgingRetain([CIContext contextWithCGLContext:CGLGetCurrentContext()
                                                         pixelFormat:[pixel_format CGLPixelFormatObj]
                                                         colorSpace:ctx->color_space
                                                         options:nil]);

    if (!ctx->glctx) {
        av_log(ctx, AV_LOG_ERROR, "CIContext not created.\n");
        return AVERROR_EXTERNAL;
    }

    // Creating an empty input image as input container for the context
    ctx->input_image = CFBridgingRetain([CIImage emptyImage]);

    return 0;
}
563
+
564
/** Initializer for the coreimagesrc video source: mark the context as a
 *  generator, reset the timestamp counter, derive the output time base from
 *  the requested frame rate, then fall through to the common init().
 */
static av_cold int init_src(AVFilterContext *fctx)
{
    CoreImageContext *ctx = fctx->priv;

    ctx->is_video_source = 1;
    ctx->pts             = 0;
    ctx->time_base       = av_inv_q(ctx->frame_rate);

    return init(fctx);
}
574
+
575
/** Release all CoreFoundation/CoreImage objects and buffers held by the
 *  context. Every pointer is NULL-checked, so this is safe to call on a
 *  partially initialized context (e.g. after a failed init()).
 */
static av_cold void uninit(AVFilterContext *fctx)
{
#define SafeCFRelease(ptr) do { \
    if (ptr) {                  \
        CFRelease(ptr);         \
        ptr = NULL;             \
    }                           \
} while (0)

    CoreImageContext *ctx = fctx->priv;

    // rendering contexts and associated resources
    SafeCFRelease(ctx->glctx);
    SafeCFRelease(ctx->cgctx);
    SafeCFRelease(ctx->color_space);
    SafeCFRelease(ctx->input_image);

    // the CIFilter chain, if any was created
    if (ctx->filters) {
        for (int n = 0; n < ctx->num_filters; n++) {
            SafeCFRelease(ctx->filters[n]);
        }
        av_freep(&ctx->filters);
    }

    av_frame_free(&ctx->picref);
}
600
+
601
// Input pad of the coreimage filter: incoming frames are processed by
// filter_frame(); the pad format is negotiated in config_input().
static const AVFilterPad vf_coreimage_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
        .config_props = config_input,
    },
    { NULL }
};
610
+
611
// Output pad of the coreimage filter: plain video output, no callbacks needed.
static const AVFilterPad vf_coreimage_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};
618
+
619
// Output pad of the coreimagesrc source: frames are produced on demand by
// request_frame(); output properties are set up in config_output().
static const AVFilterPad vsrc_coreimagesrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_output,
    },
    { NULL }
};
628
+
629
#define OFFSET(x) offsetof(CoreImageContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

// Options shared by generator-style sources: output size, frame rate,
// duration (-1 = unlimited) and sample aspect ratio.
// NOTE: comments must stay outside the macro bodies — a // before a line
// continuation would swallow the following line.
#define GENERATOR_OPTIONS                                                                                                               \
    {"size",     "set video size",                OFFSET(w),          AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0,         FLAGS}, \
    {"s",        "set video size",                OFFSET(w),          AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0,         FLAGS}, \
    {"rate",     "set video rate",                OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"},      0, 0,         FLAGS}, \
    {"r",        "set video rate",                OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"},      0, 0,         FLAGS}, \
    {"duration", "set video duration",            OFFSET(duration),   AV_OPT_TYPE_DURATION,   {.i64 = -1},       -1, INT64_MAX, FLAGS}, \
    {"d",        "set video duration",            OFFSET(duration),   AV_OPT_TYPE_DURATION,   {.i64 = -1},       -1, INT64_MAX, FLAGS}, \
    {"sar",      "set video sample aspect ratio", OFFSET(sar),        AV_OPT_TYPE_RATIONAL,   {.dbl = 1},         0, INT_MAX,   FLAGS},

// Options shared by the coreimage filter and the coreimagesrc source:
// capability listing switches, the filter-chain description string
// (name@opt=val@...#name2@...), and an optional output rectangle.
#define FILTER_OPTIONS                                                                                                                           \
    {"list_filters",    "list available filters",                OFFSET(list_filters),    AV_OPT_TYPE_BOOL,   {.i64 = 0}, 0, 1, .flags = FLAGS}, \
    {"list_generators", "list available generators",             OFFSET(list_generators), AV_OPT_TYPE_BOOL,   {.i64 = 0}, 0, 1, .flags = FLAGS}, \
    {"filter",          "names and options of filters to apply", OFFSET(filter_string),   AV_OPT_TYPE_STRING, {.str = NULL},    .flags = FLAGS}, \
    {"output_rect",     "output rectangle within output image",  OFFSET(output_rect),     AV_OPT_TYPE_STRING, {.str = NULL},    .flags = FLAGS},
646
+
647

// definitions for coreimage video filter
static const AVOption coreimage_options[] = {
    FILTER_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(coreimage);

AVFilter ff_vf_coreimage = {
    .name          = "coreimage",
    .description   = NULL_IF_CONFIG_SMALL("Video filtering using CoreImage API."),
    .init          = init,
    .uninit        = uninit,
    .priv_size     = sizeof(CoreImageContext),
    .priv_class    = &coreimage_class,
    .inputs        = vf_coreimage_inputs,
    .outputs       = vf_coreimage_outputs,
    .query_formats = query_formats,
};
667
+
668
// definitions for coreimagesrc video source
// (takes the shared filter options plus the generator options: size, rate,
// duration and sample aspect ratio)
static const AVOption coreimagesrc_options[] = {
    GENERATOR_OPTIONS
    FILTER_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(coreimagesrc);

AVFilter ff_vsrc_coreimagesrc = {
    .name          = "coreimagesrc",
    .description   = NULL_IF_CONFIG_SMALL("Video source using image generators of CoreImage API."),
    .init          = init_src,
    .uninit        = uninit,
    .priv_size     = sizeof(CoreImageContext),
    .priv_class    = &coreimagesrc_class,
    .inputs        = NULL, // pure source: no input pads
    .outputs       = vsrc_coreimagesrc_outputs,
    .query_formats = query_formats_src,
};