/*
 * QTKit input device
 * Copyright (c) 2013 Vadim Kalinsky <vadim@kalinsky.ru>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * QTKit input device
 * @author Vadim Kalinsky <vadim@kalinsky.ru>
 */

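/*
 * Illustrative invocations (device index/name depends on the system):
 *   ffmpeg -f qtkit -list_devices true -i ""
 *   ffmpeg -f qtkit -i "0" out.mpg
 *   ffmpeg -f qtkit -i "default" out.mpg
 */
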
#import <QTKit/QTKit.h>
#include <pthread.h>

#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavformat/internal.h"
#include "libavutil/internal.h"
#include "libavutil/time.h"
#include "avdevice.h"

#define QTKIT_TIMEBASE 100

static const AVRational kQTKitTimeBase_q = {
    .num = 1,
    .den = QTKIT_TIMEBASE
};

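/* Private demuxer context. It is shared between the libavformat thread and
 * the QTKit capture callback, which is why the frame handoff below is
 * protected by frame_lock/frame_wait_cond. */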
typedef struct
{
    AVClass*        class;

    float           frame_rate;
    int             frames_captured;
    int64_t         first_pts;
    pthread_mutex_t frame_lock;
    pthread_cond_t  frame_wait_cond;
    id              qt_delegate;

    int list_devices;
    int video_device_index;

    QTCaptureSession*                 capture_session;
    QTCaptureDecompressedVideoOutput* video_output;
    CVImageBufferRef                  current_frame;
} CaptureContext;

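/* current_frame is written by the QTKit delegate thread and consumed by
 * qtkit_read_packet(); these helpers serialize access to it. */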
static void lock_frames(CaptureContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(CaptureContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}

/** FrameReceiver class - delegate for QTCaptureSession
 */
@interface FFMPEG_FrameReceiver : NSObject
{
    CaptureContext* _context;
}

- (id)initWithContext:(CaptureContext*)context;

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection;

@end

@implementation FFMPEG_FrameReceiver

- (id)initWithContext:(CaptureContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection
{
    lock_frames(_context);

    // Only the most recent frame is kept; an undelivered previous frame is dropped.
    if (_context->current_frame != nil) {
        CVBufferRelease(_context->current_frame);
    }

    _context->current_frame = CVBufferRetain(videoFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end

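/* Tear down the capture session and release everything the context owns;
 * used both by qtkit_close() and by the qtkit_read_header() error path. */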
static void destroy_context(CaptureContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->qt_delegate     release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->qt_delegate     = NULL;

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame)
        CVBufferRelease(ctx->current_frame);
}

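/* read_header: pick a capture device (by index or by localized name), build a
 * QTCaptureSession delivering 24-bit RGB frames, then block until the first
 * frame arrives so the stream's width/height can be reported. */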
static int qtkit_read_header(AVFormatContext *s)
{
    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];

    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    ctx->first_pts = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

    // List devices if requested
    if (ctx->list_devices) {
        av_log(ctx, AV_LOG_INFO, "QTKit video devices:\n");
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        for (QTCaptureDevice *device in devices) {
            const char *name = [[device localizedDisplayName] UTF8String];
            int index = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // Find capture device
    QTCaptureDevice *video_device = nil;

    // check for device index given in filename
    if (ctx->video_device_index == -1) {
        sscanf(s->filename, "%d", &ctx->video_device_index);
    }

    if (ctx->video_device_index >= 0) {
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];

        if (ctx->video_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }

        video_device = [devices objectAtIndex:ctx->video_device_index];
    } else if (strncmp(s->filename, "",        1) &&
               strncmp(s->filename, "default", 7)) {
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];

        for (QTCaptureDevice *device in devices) {
            if (!strncmp(s->filename, [[device localizedDisplayName] UTF8String], strlen(s->filename))) {
                video_device = device;
                break;
            }
        }
        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    } else {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];
    }

    BOOL success = [video_device open:nil];

    // Could not open the selected device; fall back to the default QTMediaTypeVideo device
    if (!success) {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
        success      = [video_device open:nil];

        if (!success) {
            av_log(s, AV_LOG_ERROR, "No QT capture device found\n");
            goto fail;
        }
    }

    NSString* dev_display_name = [video_device localizedDisplayName];
    av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [dev_display_name UTF8String]);

    // Initialize capture session
    ctx->capture_session = [[QTCaptureSession alloc] init];

    QTCaptureDeviceInput* capture_dev_input = [[[QTCaptureDeviceInput alloc] initWithDevice:video_device] autorelease];
    success = [ctx->capture_session addInput:capture_dev_input error:nil];

    if (!success) {
        av_log(s, AV_LOG_ERROR, "Failed to add QT capture device to session\n");
        goto fail;
    }

    // Attaching output
    // FIXME: Allow for a user defined pixel format
    ctx->video_output = [[QTCaptureDecompressedVideoOutput alloc] init];

    NSDictionary *captureDictionary = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithUnsignedInt:kCVPixelFormatType_24RGB]
                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    [ctx->video_output setPixelBufferAttributes:captureDictionary];

    ctx->qt_delegate = [[FFMPEG_FrameReceiver alloc] initWithContext:ctx];

    [ctx->video_output setDelegate:ctx->qt_delegate];
    [ctx->video_output setAutomaticallyDropsLateVideoFrames:YES];
    [ctx->video_output setMinimumVideoFrameInterval:1.0/ctx->frame_rate];

    success = [ctx->capture_session addOutput:ctx->video_output error:nil];

    if (!success) {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        goto fail;
    }

    [ctx->capture_session startRunning];

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        unlock_frames(ctx);
        goto fail;
    }

    avpriv_set_pts_info(stream, 64, 1, QTKIT_TIMEBASE);

    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)CVPixelBufferGetWidth (ctx->current_frame);
    stream->codec->height     = (int)CVPixelBufferGetHeight(ctx->current_frame);
    stream->codec->pix_fmt    = AV_PIX_FMT_RGB24;

    CVBufferRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    [pool release];

    return 0;

fail:
    [pool release];

    destroy_context(ctx);

    return AVERROR(EIO);
}

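/* read_packet: wait for the delegate to deliver a frame, then copy it into a
 * raw video packet stamped with a wall-clock PTS in QTKIT_TIMEBASE units. */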
static int qtkit_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts, AV_TIME_BASE_Q, kQTKitTimeBase_q);
            pkt->stream_index = 0;
            pkt->flags |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(ctx->current_frame, 0);

            void* data = CVPixelBufferGetBaseAddress(ctx->current_frame);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
            CVBufferRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else {
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}

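/* read_close: release the capture session and any pending frame. */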
static int qtkit_close(AVFormatContext *s)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    destroy_context(ctx);

    return 0;
}

static const AVOption options[] = {
    { "frame_rate", "set frame rate", offsetof(CaptureContext, frame_rate), AV_OPT_TYPE_FLOAT, { .dbl = 30.0 }, 0.1, 30.0, AV_OPT_FLAG_DECODING_PARAM, NULL },
    { "list_devices", "list available devices", offsetof(CaptureContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(CaptureContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { NULL },
};

static const AVClass qtkit_class = {
    .class_name = "QTKit input device",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

AVInputFormat ff_qtkit_demuxer = {
    .name           = "qtkit",
    .long_name      = NULL_IF_CONFIG_SMALL("QTKit input device"),
    .priv_data_size = sizeof(CaptureContext),
    .read_header    = qtkit_read_header,
    .read_packet    = qtkit_read_packet,
    .read_close     = qtkit_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &qtkit_class,
};