/*
 * Copyright (c) 2013 Vadim Kalinsky <vadim@kalinsky.ru>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * QTKit input device
 * @author Vadim Kalinsky <vadim@kalinsky.ru>
 */
#import <QTKit/QTKit.h>

#include <pthread.h>

#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
#include "libavformat/internal.h"
/* Timestamps are emitted in a fixed 1/100 s time base (see qtkit_read_packet). */
#define QTKIT_TIMEBASE 100

static const AVRational kQTKitTimeBase_q = {
    .num = 1,
    .den = QTKIT_TIMEBASE
};
/**
 * Private per-stream context of the QTKit input device.
 *
 * Shared between the demuxer thread (qtkit_read_*) and the QTKit capture
 * delegate callback; current_frame and frame_wait_cond are protected by
 * frame_lock.
 */
typedef struct
{
    AVClass*        class;              ///< class for private options (must be first)

    float           frame_rate;         ///< requested capture frame rate ("frame_rate" option)
    int             frames_captured;    ///< number of frames delivered by the delegate so far
    int64_t         first_pts;          ///< wallclock time (av_gettime) at read_header, pts origin
    pthread_mutex_t frame_lock;         ///< guards current_frame and the condition below
    pthread_cond_t  frame_wait_cond;    ///< signaled by the delegate when a new frame arrives
    id              qt_delegate;        ///< FFMPEG_FrameReceiver instance, retained by us

    int             list_devices;       ///< "list_devices" option: print devices and abort
    int             video_device_index; ///< "video_device_index" option, -1 = parse from filename

    QTCaptureSession*                 capture_session; ///< owning capture session
    QTCaptureDecompressedVideoOutput* video_output;    ///< decompressed RGB24 video output
    CVImageBufferRef                  current_frame;   ///< most recent frame, retained; nil if consumed
} CaptureContext;
/** Acquire the frame lock protecting ctx->current_frame. */
static void lock_frames(CaptureContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}
/** Release the frame lock protecting ctx->current_frame. */
static void unlock_frames(CaptureContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
/** FrameReceiver class - delegate for QTCaptureSession
 */
@interface FFMPEG_FrameReceiver : NSObject
{
    CaptureContext* _context; ///< owning capture context; not retained (it owns us)
}

- (id)initWithContext:(CaptureContext*)context;

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection;

@end
@implementation FFMPEG_FrameReceiver

- (id)initWithContext:(CaptureContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

/**
 * QTCaptureDecompressedVideoOutput delegate callback, invoked on QTKit's
 * capture thread for every decoded frame. Stores the most recent frame in
 * the context (dropping any unconsumed one) and wakes the reader thread.
 */
- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection
{
    lock_frames(_context);

    // A frame the reader never consumed is replaced, not queued.
    if (_context->current_frame != nil) {
        CVBufferRelease(_context->current_frame);
    }

    _context->current_frame = CVBufferRetain(videoFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    // NOTE(review): incremented outside the lock; read_header only polls this
    // counter for ">= 1", so a plain increment is tolerated here.
    ++_context->frames_captured;
}

@end
/**
 * Tear down the capture session and release every resource held by ctx.
 * Safe to call on a partially-initialized context: messaging nil Objective-C
 * objects is a no-op, and current_frame is checked before release.
 */
static void destroy_context(CaptureContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output release];
    [ctx->qt_delegate release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->qt_delegate     = NULL;

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame)
        CVBufferRelease(ctx->current_frame);
}
/**
 * Open the capture device, start the QTKit session and create the single
 * rawvideo stream from the dimensions of the first captured frame.
 *
 * Device selection order: explicit video_device_index option, then a numeric
 * index parsed from the filename, then a name prefix match against the device
 * display names, and finally the system default device.
 *
 * @return 0 on success, AVERROR(EIO) on any failure (including list_devices).
 */
static int qtkit_read_header(AVFormatContext *s)
{
    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];

    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    ctx->first_pts = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

    // List devices if requested
    if (ctx->list_devices) {
        av_log(ctx, AV_LOG_INFO, "QTKit video devices:\n");
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        for (QTCaptureDevice *device in devices) {
            const char *name = [[device localizedDisplayName] UTF8String];
            int index        = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // Find capture device
    QTCaptureDevice *video_device = nil;

    // check for device index given in filename
    if (ctx->video_device_index == -1) {
        sscanf(s->filename, "%d", &ctx->video_device_index);
    }

    if (ctx->video_device_index >= 0) {
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];

        if (ctx->video_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }

        video_device = [devices objectAtIndex:ctx->video_device_index];
    } else if (strncmp(s->filename, "", 1) &&
               strncmp(s->filename, "default", 7)) {
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];

        // Select the first device whose display name starts with the filename.
        for (QTCaptureDevice *device in devices) {
            if (!strncmp(s->filename, [[device localizedDisplayName] UTF8String], strlen(s->filename))) {
                video_device = device;
                break;
            }
        }
        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    } else {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];
    }

    BOOL success = [video_device open:nil];

    // Video capture device not found, looking for QTMediaTypeVideo
    if (!success) {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
        success      = [video_device open:nil];

        if (!success) {
            av_log(s, AV_LOG_ERROR, "No QT capture device found\n");
            goto fail;
        }
    }

    NSString* dev_display_name = [video_device localizedDisplayName];
    av_log (s, AV_LOG_DEBUG, "'%s' opened\n", [dev_display_name UTF8String]);

    // Initialize capture session
    ctx->capture_session = [[QTCaptureSession alloc] init];

    QTCaptureDeviceInput* capture_dev_input = [[[QTCaptureDeviceInput alloc] initWithDevice:video_device] autorelease];
    success = [ctx->capture_session addInput:capture_dev_input error:nil];

    if (!success) {
        av_log (s, AV_LOG_ERROR, "Failed to add QT capture device to session\n");
        goto fail;
    }

    // Attaching output
    // FIXME: Allow for a user defined pixel format
    ctx->video_output = [[QTCaptureDecompressedVideoOutput alloc] init];

    NSDictionary *captureDictionary = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithUnsignedInt:kCVPixelFormatType_24RGB]
                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    [ctx->video_output setPixelBufferAttributes:captureDictionary];

    ctx->qt_delegate = [[FFMPEG_FrameReceiver alloc] initWithContext:ctx];

    [ctx->video_output setDelegate:ctx->qt_delegate];
    [ctx->video_output setAutomaticallyDropsLateVideoFrames:YES];
    [ctx->video_output setMinimumVideoFrameInterval:1.0/ctx->frame_rate];

    success = [ctx->capture_session addOutput:ctx->video_output error:nil];

    if (!success) {
        av_log (s, AV_LOG_ERROR, "can't add video output to capture session\n");
        goto fail;
    }

    [ctx->capture_session startRunning];

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        // Unlock before the shared fail path so destroy_context can destroy
        // an unlocked mutex.
        unlock_frames(ctx);
        goto fail;
    }

    avpriv_set_pts_info(stream, 64, 1, QTKIT_TIMEBASE);

    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)CVPixelBufferGetWidth (ctx->current_frame);
    stream->codec->height     = (int)CVPixelBufferGetHeight(ctx->current_frame);
    stream->codec->pix_fmt    = AV_PIX_FMT_RGB24;

    // The probe frame is discarded; read_packet waits for the next one.
    CVBufferRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    [pool release];

    return 0;

fail:
    [pool release];

    destroy_context(ctx);

    return AVERROR(EIO);
}
/**
 * Block until a frame is available, then copy it into a new key-frame packet
 * with a wallclock pts in QTKIT_TIMEBASE units.
 *
 * @return 0 on success, AVERROR(EIO) if packet allocation fails.
 */
static int qtkit_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) {
                // Fix: release the frame lock before bailing out; the
                // original early return left frame_lock held, deadlocking
                // the capture delegate and destroy_context.
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts, AV_TIME_BASE_Q, kQTKitTimeBase_q);
            pkt->stream_index = 0;
            pkt->flags |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(ctx->current_frame, 0);

            void* data = CVPixelBufferGetBaseAddress(ctx->current_frame);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
            CVBufferRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else {
            // No frame yet: mark the packet empty and sleep until the
            // capture delegate signals a new frame.
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
/** Stop the session and free all capture resources. Always returns 0. */
static int qtkit_close(AVFormatContext *s)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    destroy_context(ctx);

    return 0;
}
/** Private options of the QTKit input device. */
static const AVOption options[] = {
    // Fix: flags field previously held AV_OPT_TYPE_VIDEO_RATE (a type enum),
    // not an AV_OPT_FLAG_* value.
    { "frame_rate", "set frame rate", offsetof(CaptureContext, frame_rate), AV_OPT_TYPE_FLOAT, { .dbl = 30.0 }, 0.1, 30.0, AV_OPT_FLAG_DECODING_PARAM, NULL },
    { "list_devices", "list available devices", offsetof(CaptureContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(CaptureContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { NULL },
};
static const AVClass qtkit_class = {
    .class_name = "QTKit input device",
    .item_name  = av_default_item_name,
    .option     = options,      // restored: without this the AVOptions above are unreachable
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
AVInputFormat ff_qtkit_demuxer = {
    .name           = "qtkit",  // restored: input format must have a name for -f matching
    .long_name      = NULL_IF_CONFIG_SMALL("QTKit input device"),
    .priv_data_size = sizeof(CaptureContext),
    .read_header    = qtkit_read_header,
    .read_packet    = qtkit_read_packet,
    .read_close     = qtkit_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &qtkit_class,
};