/*
 * AVFoundation input device
 * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * AVFoundation input device
 * @author Thilo Borgmann <thilo.borgmann@mail.de>
 */
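
/*
 * Typical command lines (using the ffmpeg CLI; device indices and names are
 * machine dependent, so treat these as illustrative only):
 *
 *   ffmpeg -f avfoundation -list_devices true -i ""
 *   ffmpeg -f avfoundation -i "0:0" out.mkv
 *   ffmpeg -f avfoundation -pixel_format bgr0 -i "default:none" out.avi
 */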

#import <AVFoundation/AVFoundation.h>
#include <pthread.h>

#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavutil/avstring.h"
#include "libavformat/internal.h"
#include "libavutil/internal.h"
#include "libavutil/time.h"
#include "avdevice.h"

static const int avf_time_base = 1000000;

static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};
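
/* Both the video and the audio stream use this 1 MHz time base: packet
 * timestamps are wall-clock offsets (av_gettime() relative to the value
 * sampled in avf_read_header()) rescaled from AV_TIME_BASE_Q into it. */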

struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;
    OSType avf_id;
};

static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};

typedef struct
{
    AVClass* class;

    int frames_captured;
    int audio_frames_captured;
    int64_t first_pts;
    int64_t first_audio_pts;
    pthread_mutex_t frame_lock;
    pthread_cond_t frame_wait_cond;
    id avf_delegate;
    id avf_audio_delegate;

    int list_devices;
    int video_device_index;
    int video_stream_index;
    int audio_device_index;
    int audio_stream_index;

    char *video_filename;
    char *audio_filename;

    int num_video_devices;

    int audio_channels;
    int audio_bits_per_sample;
    int audio_float;
    int audio_be;
    int audio_signed_integer;
    int audio_packed;
    int audio_non_interleaved;

    int32_t *audio_buffer;
    int audio_buffer_size;

    enum AVPixelFormat pixel_format;

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;
    CMSampleBufferRef         current_audio_frame;
} AVFContext;

static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
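
/*
 * Frame passing between the capture callbacks and the demuxer: the delegate
 * callbacks below run on a private dispatch queue, store the most recent
 * sample buffer in the context (releasing any buffer that has not been
 * consumed yet) and signal frame_wait_cond, while avf_read_packet() waits on
 * the same condition under frame_lock. Only the latest video and audio
 * buffer is kept, so frames are dropped rather than queued if the reader
 * falls behind.
 */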

/** FrameReceiver class - delegate for AVCaptureSession
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
    fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFFrameReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void) captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
    fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end

/** AudioReceiver class - delegate for AVCaptureSession
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
    fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFAudioReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void) captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
    fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_audio_frame != nil) {
        CFRelease(_context->current_audio_frame);
    }

    _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->audio_frames_captured;
}

@end

static void destroy_context(AVFContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output release];
    [ctx->audio_output release];
    [ctx->avf_delegate release];
    [ctx->avf_audio_delegate release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->audio_output    = NULL;
    ctx->avf_delegate    = NULL;
    ctx->avf_audio_delegate = NULL;

    av_freep(&ctx->audio_buffer);

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
    }

    // also drop a pending audio frame so the retained sample buffer is not leaked
    if (ctx->current_audio_frame) {
        CFRelease(ctx->current_audio_frame);
    }
}

static void parse_device_name(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    char *tmp = av_strdup(s->filename);
    char *save;

    if (tmp[0] != ':') {
        ctx->video_filename = av_strtok(tmp,  ":", &save);
        ctx->audio_filename = av_strtok(NULL, ":", &save);
    } else {
        ctx->audio_filename = av_strtok(tmp,  ":", &save);
    }
}
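
/*
 * The input "filename" selects the capture devices. It has the form
 * "[VIDEO]:[AUDIO]", where each part may be a device index, a device name
 * (matched against the start of the localized device name), "default" or
 * "none". Examples (device names are illustrative only):
 *
 *   "0"                      - first video device, no audio
 *   "0:0"                    - first video device and first audio device
 *   ":Built-in Microphone"   - audio-only capture
 *   "Capture screen 0:none"  - first screen, explicitly without audio
 */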

static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureInput* capture_input = nil;

    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // select pixel format
    struct AVFPixelFormatSpec pxl_fmt_spec;
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(ctx->pixel_format));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    ctx->pixel_format = pxl_fmt_spec.ff_id;

    NSNumber *pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
    NSDictionary *capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                                              forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    [ctx->video_output setVideoSettings:capture_dict];
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:YES];

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    dispatch_queue_t queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}

static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];

    if (!audio_dev_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:audio_dev_input]) {
        [ctx->capture_session addInput:audio_dev_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];

    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    dispatch_queue_t queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
        [ctx->capture_session addOutput:ctx->audio_output];
    } else {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }

    return 0;
}

static int get_video_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        // do not leave the frame lock held on the error path
        unlock_frames(ctx);
        return 1;
    }

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
    CGSize image_buffer_size      = CVImageBufferGetEncodedSize(image_buffer);

    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)image_buffer_size.width;
    stream->codec->height     = (int)image_buffer_size.height;
    stream->codec->pix_fmt    = ctx->pixel_format;

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}

static int get_audio_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    // Take stream info from the first frame.
    while (ctx->audio_frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        // do not leave the frame lock held on the error paths below
        unlock_frames(ctx);
        return 1;
    }

    ctx->audio_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    CMFormatDescriptionRef format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
    const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);

    if (!basic_desc) {
        av_log(s, AV_LOG_ERROR, "audio format not available\n");
        unlock_frames(ctx);
        return 1;
    }

    stream->codec->codec_type     = AVMEDIA_TYPE_AUDIO;
    stream->codec->sample_rate    = basic_desc->mSampleRate;
    stream->codec->channels       = basic_desc->mChannelsPerFrame;
    stream->codec->channel_layout = av_get_default_channel_layout(stream->codec->channels);

    ctx->audio_channels        = basic_desc->mChannelsPerFrame;
    ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
    ctx->audio_float           = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
    ctx->audio_be              = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
    ctx->audio_signed_integer  = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
    ctx->audio_packed          = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
    ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;

    if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_float &&
        ctx->audio_packed) {
        stream->codec->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
    } else {
        av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
        unlock_frames(ctx);
        return 1;
    }

    if (ctx->audio_non_interleaved) {
        CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
        ctx->audio_buffer_size        = CMBlockBufferGetDataLength(block_buffer);
        ctx->audio_buffer             = av_malloc(ctx->audio_buffer_size);
        if (!ctx->audio_buffer) {
            av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
            unlock_frames(ctx);
            return 1;
        }
    }

    CFRelease(ctx->current_audio_frame);
    ctx->current_audio_frame = nil;

    unlock_frames(ctx);

    return 0;
}

static int avf_read_header(AVFormatContext *s)
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    AVFContext *ctx         = (AVFContext*)s->priv_data;
    ctx->first_pts          = av_gettime();
    ctx->first_audio_pts    = av_gettime();
    uint32_t num_screens    = 0;

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    CGGetActiveDisplayList(0, NULL, &num_screens);
#endif

    // List devices if requested
    if (ctx->list_devices) {
        av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        int index = 0;
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
            index++;
        }
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        if (num_screens > 0) {
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            for (int i = 0; i < num_screens; i++) {
                av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", index + i, i);
            }
        }
#endif

        av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
        devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            int index        = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // Find capture device
    AVCaptureDevice *video_device = nil;
    AVCaptureDevice *audio_device = nil;

    NSArray *video_devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    ctx->num_video_devices = [video_devices count];

    // parse input filename for video and audio device
    parse_device_name(s);

    // check for device index given in filename
    if (ctx->video_device_index == -1 && ctx->video_filename) {
        sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
    }
    if (ctx->audio_device_index == -1 && ctx->audio_filename) {
        sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
    }

    if (ctx->video_device_index >= 0) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            video_device = [video_devices objectAtIndex:ctx->video_device_index];
        } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];
            video_device = (AVCaptureDevice*) capture_screen_input;
#endif
        } else {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }
    } else if (ctx->video_filename &&
               strncmp(ctx->video_filename, "none", 4)) {
        if (!strncmp(ctx->video_filename, "default", 7)) {
            video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            // looking for video inputs
            for (AVCaptureDevice *device in video_devices) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    break;
                }
            }

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            // looking for screen inputs
            if (!video_device) {
                int idx;
                if (sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
                    CGDirectDisplayID screens[num_screens];
                    CGGetActiveDisplayList(num_screens, screens, &num_screens);
                    AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
                    video_device = (AVCaptureDevice*) capture_screen_input;
                    ctx->video_device_index = ctx->num_video_devices + idx;
                }
            }
#endif
        }

        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    }

    // get audio device
    if (ctx->audio_device_index >= 0) {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

        if (ctx->audio_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
            goto fail;
        }

        audio_device = [devices objectAtIndex:ctx->audio_device_index];
    } else if (ctx->audio_filename &&
               strncmp(ctx->audio_filename, "none", 4)) {
        if (!strncmp(ctx->audio_filename, "default", 7)) {
            audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        } else {
            NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
                    audio_device = device;
                    break;
                }
            }
        }

        if (!audio_device) {
            av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
            goto fail;
        }
    }

    // Fail if neither a video nor an audio capture device was found
    if (!video_device && !audio_device) {
        av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
        goto fail;
    }

    if (video_device) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
        } else {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
        }
    }
    if (audio_device) {
        av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
    }

    // Initialize capture session
    ctx->capture_session = [[AVCaptureSession alloc] init];

    if (video_device && add_video_device(s, video_device)) {
        goto fail;
    }
    if (audio_device && add_audio_device(s, audio_device)) {
        // a failing audio device is not treated as fatal here
    }

    [ctx->capture_session startRunning];

    if (video_device && get_video_config(s)) {
        goto fail;
    }

    // set audio stream
    if (audio_device && get_audio_config(s)) {
        goto fail;
    }

    [pool release];
    return 0;

fail:
    [pool release];
    destroy_context(ctx);
    return AVERROR(EIO);
}

static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            // only query the image buffer once we know a video frame is pending
            CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);

            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(image_buffer)) < 0) {
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts,
                                               AV_TIME_BASE_Q,
                                               avf_time_base_q);
            pkt->stream_index = ctx->video_stream_index;
            pkt->flags       |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(image_buffer, 0);

            void* data = CVPixelBufferGetBaseAddress(image_buffer);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(image_buffer, 0);
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else if (ctx->current_audio_frame != nil) {
            CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
            int block_buffer_size         = CMBlockBufferGetDataLength(block_buffer);

            if (!block_buffer || !block_buffer_size) {
                return AVERROR(EIO);
            }

            if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
                return AVERROR_BUFFER_TOO_SMALL;
            }

            if (av_new_packet(pkt, block_buffer_size) < 0) {
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_audio_pts,
                                               AV_TIME_BASE_Q,
                                               avf_time_base_q);

            pkt->stream_index = ctx->audio_stream_index;
            pkt->flags       |= AV_PKT_FLAG_KEY;

            if (ctx->audio_non_interleaved) {
                int sample, c, shift;

                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
                if (ret != kCMBlockBufferNoErr) {
                    return AVERROR(EIO);
                }

                int num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));

                // transform decoded frame into output format
                #define INTERLEAVE_OUTPUT(bps) \
                { \
                    int##bps##_t **src; \
                    int##bps##_t *dest; \
                    src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
                    if (!src) return AVERROR(EIO); \
                    for (c = 0; c < ctx->audio_channels; c++) { \
                        src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
                    } \
                    dest  = (int##bps##_t*)pkt->data; \
                    shift = bps - ctx->audio_bits_per_sample; \
                    for (sample = 0; sample < num_samples; sample++) \
                        for (c = 0; c < ctx->audio_channels; c++) \
                            *dest++ = src[c][sample] << shift; \
                    av_freep(&src); \
                }

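                /*
                 * The macro above turns CoreAudio's non-interleaved (planar)
                 * layout - ctx->audio_buffer holds num_samples values for
                 * channel 0, then num_samples values for channel 1, and so
                 * on - into the packed layout expected by the PCM codec ids
                 * set in get_audio_config() (c0s0 c1s0 c0s1 c1s1 ...). The
                 * shift left-justifies samples whose bit depth is smaller
                 * than the 16- or 32-bit container.
                 */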
                if (ctx->audio_bits_per_sample <= 16) {
                    INTERLEAVE_OUTPUT(16)
                } else {
                    INTERLEAVE_OUTPUT(32)
                }
            } else {
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    return AVERROR(EIO);
                }
            }

            CFRelease(ctx->current_audio_frame);
            ctx->current_audio_frame = nil;
        } else {
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}

static int avf_close(AVFormatContext *s)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;
    destroy_context(ctx);
    return 0;
}

static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { NULL },
};

static const AVClass avf_class = {
    .class_name = "AVFoundation input device",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avf_class,
};