Signed-off-by: Thilo Borgmann <thilo.borgmann@mail.de>
Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
... | ... |
@@ -255,6 +255,7 @@ External library support: |
255 | 255 |
--enable-opencl enable OpenCL code |
256 | 256 |
--enable-opengl enable OpenGL rendering [no] |
257 | 257 |
--enable-openssl enable openssl [no] |
258 |
+ --disable-qtkit disable QTKit input device support [autodetect] |
|
258 | 259 |
--enable-x11grab enable X11 grabbing [no] |
259 | 260 |
--disable-xlib disable xlib [autodetect] |
260 | 261 |
--disable-zlib disable zlib [autodetect] |
... | ... |
@@ -2360,6 +2361,8 @@ oss_indev_deps_any="soundcard_h sys_soundcard_h" |
2360 | 2360 |
oss_outdev_deps_any="soundcard_h sys_soundcard_h" |
2361 | 2361 |
pulse_indev_deps="libpulse" |
2362 | 2362 |
pulse_outdev_deps="libpulse" |
2363 |
+qtkit_indev_extralibs="-framework QTKit -framework Foundation -framework QuartzCore" |
|
2364 |
+qtkit_indev_select="qtkit" |
|
2363 | 2365 |
sdl_outdev_deps="sdl" |
2364 | 2366 |
sndio_indev_deps="sndio_h" |
2365 | 2367 |
sndio_outdev_deps="sndio_h" |
... | ... |
@@ -4643,6 +4646,7 @@ enabled openssl && { check_lib openssl/ssl.h SSL_library_init -lssl -l |
4643 | 4643 |
check_lib openssl/ssl.h SSL_library_init -lssl32 -leay32 || |
4644 | 4644 |
check_lib openssl/ssl.h SSL_library_init -lssl -lcrypto -lws2_32 -lgdi32 || |
4645 | 4645 |
die "ERROR: openssl not found"; } |
4646 |
+enabled qtkit_indev && { check_header QTKit/QTKit.h || disable qtkit_indev; } |
|
4646 | 4647 |
|
4647 | 4648 |
if enabled gnutls; then |
4648 | 4649 |
{ check_lib nettle/bignum.h nettle_mpz_get_str_256 -lnettle -lhogweed -lgmp && enable nettle; } || |
... | ... |
@@ -531,6 +531,16 @@ Record a stream from default device: |
531 | 531 |
ffmpeg -f pulse -i default /tmp/pulse.wav |
532 | 532 |
@end example |
533 | 533 |
|
534 |
+@section qtkit |
|
535 |
+ |
|
536 |
+QTKit input device. |
|
537 |
+ |
|
538 |
+The filename passed as input is unused. The default device will be chosen. |
|
539 |
+ |
|
540 |
+@example |
|
541 |
+ffmpeg -f qtkit -i "" out.mpg |
|
542 |
+@end example |
|
543 |
+ |
|
534 | 544 |
@section sndio |
535 | 545 |
|
536 | 546 |
sndio input device. |
... | ... |
@@ -37,6 +37,7 @@ OBJS-$(CONFIG_PULSE_INDEV) += pulse_audio_dec.o \ |
37 | 37 |
pulse_audio_common.o |
38 | 38 |
OBJS-$(CONFIG_PULSE_OUTDEV) += pulse_audio_enc.o \ |
39 | 39 |
pulse_audio_common.o |
40 |
+OBJS-$(CONFIG_QTKIT_INDEV) += qtkit.o |
|
40 | 41 |
OBJS-$(CONFIG_SDL_OUTDEV) += sdl.o |
41 | 42 |
OBJS-$(CONFIG_SNDIO_INDEV) += sndio_common.o sndio_dec.o |
42 | 43 |
OBJS-$(CONFIG_SNDIO_OUTDEV) += sndio_common.o sndio_enc.o |
... | ... |
@@ -60,6 +60,7 @@ void avdevice_register_all(void) |
60 | 60 |
REGISTER_OUTDEV (OPENGL, opengl); |
61 | 61 |
REGISTER_INOUTDEV(OSS, oss); |
62 | 62 |
REGISTER_INOUTDEV(PULSE, pulse); |
63 |
+ REGISTER_INDEV (QTKIT, qtkit); |
|
63 | 64 |
REGISTER_OUTDEV (SDL, sdl); |
64 | 65 |
REGISTER_INOUTDEV(SNDIO, sndio); |
65 | 66 |
REGISTER_INOUTDEV(V4L2, v4l2); |
66 | 67 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,306 @@ |
0 |
+/* |
|
1 |
+ * QTKit input device |
|
2 |
+ * Copyright (c) 2013 Vadim Kalinsky <vadim@kalinsky.ru> |
|
3 |
+ * |
|
4 |
+ * This file is part of FFmpeg. |
|
5 |
+ * |
|
6 |
+ * FFmpeg is free software; you can redistribute it and/or |
|
7 |
+ * modify it under the terms of the GNU Lesser General Public |
|
8 |
+ * License as published by the Free Software Foundation; either |
|
9 |
+ * version 2.1 of the License, or (at your option) any later version. |
|
10 |
+ * |
|
11 |
+ * FFmpeg is distributed in the hope that it will be useful, |
|
12 |
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
13 |
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
|
14 |
+ * Lesser General Public License for more details. |
|
15 |
+ * |
|
16 |
+ * You should have received a copy of the GNU Lesser General Public |
|
17 |
+ * License along with FFmpeg; if not, write to the Free Software |
|
18 |
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
19 |
+ */ |
|
20 |
+ |
|
21 |
+/** |
|
22 |
+ * @file |
|
23 |
+ * QTKit input device |
|
24 |
+ * @author Vadim Kalinsky <vadim@kalinsky.ru> |
|
25 |
+ */ |
|
26 |
+ |
|
27 |
+#import <QTKit/QTKit.h> |
|
28 |
+#include <pthread.h> |
|
29 |
+ |
|
30 |
+#include "libavutil/pixdesc.h" |
|
31 |
+#include "libavutil/opt.h" |
|
32 |
+#include "libavformat/internal.h" |
|
33 |
+#include "libavutil/internal.h" |
|
34 |
+#include "libavutil/time.h" |
|
35 |
+#include "avdevice.h" |
|
36 |
+ |
|
37 |
/* Denominator of the internal packet timestamp timebase (1/100 s). */
static const int kQTKitTimeBase = 100;

/* AVRational form of the same timebase, for av_rescale_q() in read_packet. */
static const AVRational kQTKitTimeBase_q = {
    .num = 1,
    .den = kQTKitTimeBase
};
|
43 |
+ |
|
44 |
/**
 * Private data of the QTKit input device, shared between the demuxer
 * callbacks and the QTCaptureSession delegate.
 */
typedef struct
{
    AVClass* class;                    // pointer to AVClass; must be the first field (AVOption machinery)

    float frame_rate;                  // requested capture rate, set via the "frame_rate" option
    int frames_captured;               // count of frames delivered by the capture callback so far
    int64_t first_pts;                 // wallclock time (av_gettime) at header read; pts origin
    pthread_mutex_t frame_lock;        // guards current_frame
    pthread_cond_t frame_wait_cond;    // signalled by the delegate when a new frame arrives
    id qt_delegate;                    // FFMPEG_FrameReceiver instance attached to video_output

    QTCaptureSession* capture_session;
    QTCaptureDecompressedVideoOutput* video_output;
    CVImageBufferRef current_frame;    // most recently captured frame, retained; nil when consumed
} CaptureContext;
|
59 |
+ |
|
60 |
+static void lock_frames(CaptureContext* ctx) |
|
61 |
+{ |
|
62 |
+ pthread_mutex_lock(&ctx->frame_lock); |
|
63 |
+} |
|
64 |
+ |
|
65 |
+static void unlock_frames(CaptureContext* ctx) |
|
66 |
+{ |
|
67 |
+ pthread_mutex_unlock(&ctx->frame_lock); |
|
68 |
+} |
|
69 |
+ |
|
70 |
/** FrameReceiver class - delegate for QTCaptureSession.
 *  Receives decompressed video frames from the capture session and publishes
 *  the most recent one to the demuxer through the shared CaptureContext.
 */
@interface FFMPEG_FrameReceiver : NSObject
{
    CaptureContext* _context;
}

- (id)initWithContext:(CaptureContext*)context;

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection;

@end

@implementation FFMPEG_FrameReceiver

- (id)initWithContext:(CaptureContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection
{
    lock_frames(_context);

    // Drop a frame the demuxer has not consumed yet; only the latest is kept.
    if (_context->current_frame != nil) {
        CVBufferRelease(_context->current_frame);
    }

    _context->current_frame = CVBufferRetain(videoFrame);

    // Update the counter while still holding the lock: qtkit_read_header()
    // polls frames_captured from another thread, so incrementing it after
    // unlock_frames() (as before) was a data race.
    ++_context->frames_captured;

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);
}

@end
|
116 |
+ |
|
117 |
/**
 * Stop the capture session and free every resource owned by the context.
 * Called from both the qtkit_read_header() failure path and qtkit_close().
 * Must be called with frame_lock NOT held (the mutex is destroyed here).
 */
static void destroy_context(CaptureContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->qt_delegate     release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->qt_delegate     = NULL;

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame) {
        CVBufferRelease(ctx->current_frame);
        // Clear the pointer so a repeated teardown cannot double-release.
        ctx->current_frame = NULL;
    }
}
|
135 |
+ |
|
136 |
/**
 * Open the default QTKit capture device, start the capture session and
 * create a single rawvideo (RGB24) stream whose dimensions are taken from
 * the first captured frame.
 *
 * @param s demuxer context; priv_data is a CaptureContext
 * @return 0 on success, a negative AVERROR code on failure
 */
static int qtkit_read_header(AVFormatContext *s)
{
    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];

    CaptureContext* ctx = (CaptureContext*)s->priv_data;
    int ret = AVERROR(EIO);

    ctx->first_pts = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

    // Find default capture device; try muxed (e.g. DV cameras) first.
    QTCaptureDevice *video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];

    BOOL success = [video_device open:nil];

    // Muxed capture device not found, fall back to QTMediaTypeVideo.
    if (!success) {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
        success      = [video_device open:nil];

        if (!success) {
            av_log(s, AV_LOG_ERROR, "No QT capture device found\n");
            goto fail;
        }
    }

    NSString* dev_display_name = [video_device localizedDisplayName];
    av_log (s, AV_LOG_DEBUG, "'%s' opened\n", [dev_display_name UTF8String]);

    // Initialize capture session
    ctx->capture_session = [[QTCaptureSession alloc] init];

    QTCaptureDeviceInput* capture_dev_input = [[[QTCaptureDeviceInput alloc] initWithDevice:video_device] autorelease];
    success = [ctx->capture_session addInput:capture_dev_input error:nil];

    if (!success) {
        av_log (s, AV_LOG_ERROR, "Failed to add QT capture device to session\n");
        goto fail;
    }

    // Attaching output
    // FIXME: Allow for a user defined pixel format
    ctx->video_output = [[QTCaptureDecompressedVideoOutput alloc] init];

    NSDictionary *captureDictionary = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithUnsignedInt:kCVPixelFormatType_24RGB]
                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    [ctx->video_output setPixelBufferAttributes:captureDictionary];

    ctx->qt_delegate = [[FFMPEG_FrameReceiver alloc] initWithContext:ctx];

    [ctx->video_output setDelegate:ctx->qt_delegate];
    [ctx->video_output setAutomaticallyDropsLateVideoFrames:YES];
    [ctx->video_output setMinimumVideoFrameInterval:1.0/ctx->frame_rate];

    success = [ctx->capture_session addOutput:ctx->video_output error:nil];

    if (!success) {
        av_log (s, AV_LOG_ERROR, "can't add video output to capture session\n");
        goto fail;
    }

    [ctx->capture_session startRunning];

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        // Unlock before bailing out: destroy_context() calls
        // pthread_mutex_destroy(), which is undefined on a locked mutex.
        unlock_frames(ctx);
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    avpriv_set_pts_info(stream, 64, 1, kQTKitTimeBase);

    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)CVPixelBufferGetWidth (ctx->current_frame);
    stream->codec->height     = (int)CVPixelBufferGetHeight(ctx->current_frame);
    stream->codec->pix_fmt    = AV_PIX_FMT_RGB24;

    CVBufferRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    [pool release];

    return 0;

fail:
    [pool release];

    destroy_context(ctx);

    return ret;
}
|
239 |
+ |
|
240 |
/**
 * Block until a captured frame is available, then copy it into a packet.
 * Timestamps are wallclock-based, rescaled to kQTKitTimeBase.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
static int qtkit_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            int ret = av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame));
            if (ret < 0) {
                // Release the mutex before returning: the previous code
                // returned with frame_lock held, deadlocking the next
                // read_packet/close call. Also propagate the real error.
                unlock_frames(ctx);
                return ret;
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts, AV_TIME_BASE_Q, kQTKitTimeBase_q);
            pkt->stream_index = 0;
            pkt->flags |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(ctx->current_frame, 0);

            void* data = CVPixelBufferGetBaseAddress(ctx->current_frame);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
            CVBufferRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else {
            pkt->data = NULL;
            // Wait for the delegate to deliver a frame; the condition wait
            // atomically releases and re-acquires frame_lock.
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
|
274 |
+ |
|
275 |
/* Demuxer read_close callback: tear down the capture session and context. */
static int qtkit_close(AVFormatContext *s)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    destroy_context(ctx);

    return 0;
}
|
283 |
+ |
|
284 |
/* Private options of the qtkit input device. */
static const AVOption options[] = {
    // 8th field is AVOption.flags; the previous code passed
    // AV_OPT_TYPE_VIDEO_RATE (an AVOptionType value) there by mistake.
    { "frame_rate", "set frame rate", offsetof(CaptureContext, frame_rate), AV_OPT_TYPE_FLOAT, { .dbl = 30.0 }, 0.1, 30.0, AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM, NULL },
    { NULL },
};
|
288 |
+ |
|
289 |
/* AVClass exposing the private options above through the AVOption API. */
static const AVClass qtkit_class = {
    .class_name = "QTKit input device",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
|
295 |
+ |
|
296 |
/* Input device definition registered by avdevice_register_all() as "qtkit".
 * AVFMT_NOFILE: the input "filename" is unused; the default device is opened. */
AVInputFormat ff_qtkit_demuxer = {
    .name           = "qtkit",
    .long_name      = NULL_IF_CONFIG_SMALL("QTKit input device"),
    .priv_data_size = sizeof(CaptureContext),
    .read_header    = qtkit_read_header,
    .read_packet    = qtkit_read_packet,
    .read_close     = qtkit_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &qtkit_class,
};