FFmpeg
avfoundation.m
Go to the documentation of this file.
1 /*
2  * AVFoundation input device
3  * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * AVFoundation input device
25  * @author Thilo Borgmann <thilo.borgmann@mail.de>
26  */
27 
28 #import <AVFoundation/AVFoundation.h>
29 #include <pthread.h>
30 
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/opt.h"
34 #include "libavutil/avstring.h"
35 #include "libavformat/demux.h"
36 #include "libavformat/internal.h"
37 #include "libavutil/internal.h"
38 #include "libavutil/parseutils.h"
39 #include "libavutil/time.h"
40 #include "libavutil/imgutils.h"
41 #include "avdevice.h"
42 
/* Internal time base used for all timestamps produced by this input
 * device: microseconds. */
static const int avf_time_base = 1000000;

/* The same time base expressed as an AVRational, for av_rescale_q()
 * style conversions. */
static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};
49 
52  OSType avf_id;
53 };
54 
/* Mapping between FFmpeg pixel formats (ff_id) and the CoreVideo pixel
 * format identifiers (avf_id) that AVFoundation understands.  The table
 * is terminated by an AV_PIX_FMT_NONE sentinel entry. */
static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    /* kCVPixelFormatType_OneComponent8 is only available on OSX >= 10.8. */
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};
83 
84 typedef struct
85 {
86  AVClass* class;
87 
93 
95  int width, height;
96 
103 
109 
110  char *url;
113 
115 
119  int audio_be;
123 
126 
127  enum AVPixelFormat pixel_format;
128 
129  AVCaptureSession *capture_session;
130  AVCaptureVideoDataOutput *video_output;
131  AVCaptureAudioDataOutput *audio_output;
132  CMSampleBufferRef current_frame;
133  CMSampleBufferRef current_audio_frame;
134 
135  AVCaptureDevice *observed_device;
136 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
137  AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
138 #endif
140 } AVFContext;
141 
143 {
144  pthread_mutex_lock(&ctx->frame_lock);
145 }
146 
148 {
149  pthread_mutex_unlock(&ctx->frame_lock);
150 }
151 
152 /** FrameReciever class - delegate for AVCaptureSession
153  */
154 @interface AVFFrameReceiver : NSObject
155 {
157 }
158 
159 - (id)initWithContext:(AVFContext*)context;
160 
161 - (void) captureOutput:(AVCaptureOutput *)captureOutput
162  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
163  fromConnection:(AVCaptureConnection *)connection;
164 
165 @end
166 
167 @implementation AVFFrameReceiver
168 
/** Designated initializer.
 *
 *  Stores a borrowed pointer to the demuxer context and, on OSX >= 10.7,
 *  installs a KVO observation on the observed device's transport-controls
 *  playback mode (used to detect end of playback on muxed devices).
 */
- (id)initWithContext:(AVFContext*)context
{
    self = [super init];
    if (!self)
        return nil;

    _context = context;

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    // Start observing only when a device has been registered for it.
    AVCaptureDevice *device = _context->observed_device;
    if (device) {
        [device addObserver: self
                 forKeyPath: NSStringFromSelector(@selector(transportControlsPlaybackMode))
                    options: NSKeyValueObservingOptionNew
                    context: _context];
    }
#endif

    return self;
}
189 
/** Tears down the KVO observation installed by initWithContext: (if any)
 *  before the receiver is destroyed.  This file uses manual retain/release,
 *  hence the explicit [super dealloc]. */
- (void)dealloc {
    // stop observing if a device is set for it
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    if (_context->observed_device) {
        NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
        [_context->observed_device removeObserver: self forKeyPath: keyPath];
    }
#endif
    [super dealloc];
}
200 
/** KVO callback for the observed device's transportControlsPlaybackMode.
 *
 *  Our own registrations pass the AVFContext pointer as the KVO context,
 *  so only notifications carrying that context are handled here; anything
 *  else is forwarded to super, as KVO requires.  When the mode switches
 *  to "not playing", observed_quit is raised so the demuxer can stop. */
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    if (context == _context) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        AVCaptureDeviceTransportControlsPlaybackMode mode =
            [change[NSKeyValueChangeNewKey] integerValue];

        // Act only on actual mode transitions.
        if (mode != _context->observed_mode) {
            if (mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
                _context->observed_quit = 1;
            }
            _context->observed_mode = mode;
        }
#endif
    } else {
        [super observeValueForKeyPath: keyPath
                             ofObject: object
                               change: change
                              context: context];
    }
}
224 
/** AVCaptureVideoDataOutput delegate callback: keeps only the most recent
 *  video sample buffer in the context, releasing the previously stored one
 *  and retaining the new one (ownership is handed to the demuxer side).
 *
 *  NOTE(review): this listing appears to elide the frame_lock
 *  acquisition/release and the frames_captured update around the swap —
 *  confirm against the full source before relying on this excerpt. */
- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection
{

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);

}
241 
242 @end
243 
244 /** AudioReciever class - delegate for AVCaptureSession
245  */
246 @interface AVFAudioReceiver : NSObject
247 {
249 }
250 
251 - (id)initWithContext:(AVFContext*)context;
252 
253 - (void) captureOutput:(AVCaptureOutput *)captureOutput
254  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
255  fromConnection:(AVCaptureConnection *)connection;
256 
257 @end
258 
259 @implementation AVFAudioReceiver
260 
/** Designated initializer: stores a borrowed pointer to the demuxer
 *  context for later use by the delegate callback. */
- (id)initWithContext:(AVFContext*)context
{
    self = [super init];
    if (self != nil)
        _context = context;
    return self;
}
268 
/** AVCaptureAudioDataOutput delegate callback: keeps only the most recent
 *  audio sample buffer in the context, releasing the previously stored one
 *  and retaining the new one (ownership is handed to the demuxer side).
 *
 *  NOTE(review): this listing appears to elide the frame_lock
 *  acquisition/release and the audio frame counter update around the
 *  swap — confirm against the full source before relying on this excerpt. */
- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection
{

    if (_context->current_audio_frame != nil) {
        CFRelease(_context->current_audio_frame);
    }

    _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);

}
285 
286 @end
287 
289 {
290  [ctx->capture_session stopRunning];
291 
292  [ctx->capture_session release];
293  [ctx->video_output release];
294  [ctx->audio_output release];
295  [ctx->avf_delegate release];
296  [ctx->avf_audio_delegate release];
297 
298  ctx->capture_session = NULL;
299  ctx->video_output = NULL;
300  ctx->audio_output = NULL;
301  ctx->avf_delegate = NULL;
302  ctx->avf_audio_delegate = NULL;
303 
304  av_freep(&ctx->url);
305  av_freep(&ctx->audio_buffer);
306 
307  pthread_mutex_destroy(&ctx->frame_lock);
308 
309  if (ctx->current_frame) {
310  CFRelease(ctx->current_frame);
311  }
312 }
313 
315 {
316  AVFContext *ctx = (AVFContext*)s->priv_data;
317  char *save;
318 
319  ctx->url = av_strdup(s->url);
320 
321  if (!ctx->url)
322  return AVERROR(ENOMEM);
323  if (ctx->url[0] != ':') {
324  ctx->video_filename = av_strtok(ctx->url, ":", &save);
325  ctx->audio_filename = av_strtok(NULL, ":", &save);
326  } else {
327  ctx->audio_filename = av_strtok(ctx->url, ":", &save);
328  }
329  return 0;
330 }
331 
332 /**
333  * Configure the video device.
334  *
335  * Configure the video device using a run-time approach to access properties
336  * since formats, activeFormat are available since iOS >= 7.0 or OSX >= 10.7
337  * and activeVideoMaxFrameDuration is available since i0S >= 7.0 and OSX >= 10.9.
338  *
339  * The NSUndefinedKeyException must be handled by the caller of this function.
340  *
341  */
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    double framerate          = av_q2d(ctx->framerate);
    NSObject *range           = nil;
    NSObject *format          = nil;
    NSObject *selected_range  = nil;
    NSObject *selected_format = nil;

    // try to configure format by formats list
    // might raise an exception if no format list is given
    // (then fallback to default, no configuration)
    @try {
        for (format in [video_device valueForKey:@"formats"]) {
            CMFormatDescriptionRef formatDescription;
            CMVideoDimensions dimensions;

            formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
            dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

            // 0x0 means "any size": the first listed format then matches.
            if ((ctx->width == 0 && ctx->height == 0) ||
                (dimensions.width == ctx->width && dimensions.height == ctx->height)) {

                selected_format = format;

                for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                    double max_framerate;

                    [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                    // allow a small tolerance for rates like 29.97
                    if (fabs(framerate - max_framerate) < 0.01) {
                        selected_range = range;
                        break;
                    }
                }
            }
        }

        if (!selected_format) {
            av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
                ctx->width, ctx->height);
            goto unsupported_format;
        }

        if (!selected_range) {
            av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
                framerate);
            // muxed devices may not report frame rate ranges; keep going
            if (ctx->video_is_muxed) {
                av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
            } else {
                goto unsupported_format;
            }
        }

        if ([video_device lockForConfiguration:NULL] == YES) {
            if (selected_format) {
                [video_device setValue:selected_format forKey:@"activeFormat"];
            }
            if (selected_range) {
                // setting min == max pins the device to a fixed frame rate
                NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            }
        } else {
            av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
            return AVERROR(EINVAL);
        }
    } @catch(NSException *e) {
        // Log against the AVFormatContext like every other message in this
        // function (previously this one message logged against ctx).
        av_log(s, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
    }

    return 0;

unsupported_format:

    // dump everything the device can do to help the user pick a valid mode
    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, "  %dx%d@[%f %f]fps\n",
                dimensions.width, dimensions.height,
                min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}
438 
/**
 * Wire the selected video source into the capture session.
 *
 * For real capture devices an AVCaptureDeviceInput is created; indices at
 * or above num_video_devices refer to screen-capture inputs, which already
 * are AVCaptureInputs and are used directly.  Afterwards an
 * AVCaptureVideoDataOutput is attached, the pixel format is negotiated
 * against avf_pixel_formats, and the AVFFrameReceiver delegate is
 * installed on a private dispatch queue.
 *
 * Returns 0 on success, 1 or a negative AVERROR code on failure.
 */
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    int ret;
    NSError *error = nil;
    AVCaptureInput* capture_input = nil;
    struct AVFPixelFormatSpec pxl_fmt_spec;
    NSNumber *pixel_format;
    NSDictionary *capture_dict;
    dispatch_queue_t queue;

    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        // screen-capture "devices" are already AVCaptureInputs
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // Configure device framerate and video size
    @try {
        if ((ret = configure_video_device(s, video_device)) < 0) {
            return ret;
        }
    } @catch (NSException *exception) {
        // NSUndefinedKeyException just means the run-time property probing
        // is unavailable on this OS version; anything else is fatal.
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
            av_log (s, AV_LOG_ERROR, "An error occurred: %s", [exception.reason UTF8String]);
            return AVERROR_EXTERNAL;
        }
    }

    // select pixel format
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        // report the format the user actually asked for;
        // pxl_fmt_spec.ff_id is AV_PIX_FMT_NONE here and would print "none"
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(ctx->pixel_format));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    // set videoSettings to an empty dict for receiving raw data of muxed devices
    if (ctx->capture_raw_data) {
        ctx->pixel_format          = pxl_fmt_spec.ff_id;
        ctx->video_output.videoSettings = @{ };
    } else {
        ctx->pixel_format          = pxl_fmt_spec.ff_id;
        pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
        capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        [ctx->video_output setVideoSettings:capture_dict];
    }
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    // check for transport control support and set observer device if supported
    if (!ctx->video_is_screen) {
        int trans_ctrl = [video_device transportControlsSupported];
        AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];

        if (trans_ctrl) {
            ctx->observed_mode   = trans_mode;
            ctx->observed_device = video_device;
        }
    }
#endif

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}
585 
/**
 * Wire the given audio capture device into the capture session:
 * create a device input, attach an AVCaptureAudioDataOutput and install
 * the AVFAudioReceiver delegate on a private dispatch queue.
 *
 * Returns 0 on success, 1 on failure.
 */
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *err = nil;
    dispatch_queue_t audio_queue;
    AVCaptureDeviceInput *device_input;

    device_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device
                                                           error:&err] autorelease];
    if (!device_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[err localizedDescription] UTF8String]);
        return 1;
    }

    if (![ctx->capture_session canAddInput:device_input]) {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }
    [ctx->capture_session addInput:device_input];

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];
    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    // Deliver sample buffers to the receiver on a dedicated serial queue.
    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    audio_queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate
                                         queue:audio_queue];
    dispatch_release(audio_queue);

    if (![ctx->capture_session canAddOutput:ctx->audio_output]) {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }
    [ctx->capture_session addOutput:ctx->audio_output];

    return 0;
}
629 
631 {
632  AVFContext *ctx = (AVFContext*)s->priv_data;
633  CVImageBufferRef image_buffer;
634  CMBlockBufferRef block_buffer;
635  CGSize image_buffer_size;
636  AVStream* stream = avformat_new_stream(s, NULL);
637 
638  if (!stream) {
639  return 1;
640  }
641 
642  // Take stream info from the first frame.
643  while (ctx->frames_captured < 1) {
644  CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
645  }
646 
647  lock_frames(ctx);
648 
649  ctx->video_stream_index = stream->index;
650 
651  avpriv_set_pts_info(stream, 64, 1, avf_time_base);
652 
653  image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
654  block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);
655 
656  if (image_buffer) {
657  image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);
658 
659  stream->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
660  stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
661  stream->codecpar->width = (int)image_buffer_size.width;
662  stream->codecpar->height = (int)image_buffer_size.height;
663  stream->codecpar->format = ctx->pixel_format;
664  } else {
665  stream->codecpar->codec_id = AV_CODEC_ID_DVVIDEO;
666  stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
667  stream->codecpar->format = ctx->pixel_format;
668  }
669 
670  CFRelease(ctx->current_frame);
671  ctx->current_frame = nil;
672 
674 
675  return 0;
676 }
677 
679 {
680  AVFContext *ctx = (AVFContext*)s->priv_data;
681  CMFormatDescriptionRef format_desc;
682  AVStream* stream = avformat_new_stream(s, NULL);
683 
684  if (!stream) {
685  return 1;
686  }
687 
688  // Take stream info from the first frame.
689  while (ctx->audio_frames_captured < 1) {
690  CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
691  }
692 
693  lock_frames(ctx);
694 
695  ctx->audio_stream_index = stream->index;
696 
697  avpriv_set_pts_info(stream, 64, 1, avf_time_base);
698 
699  format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
700  const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);
701 
702  if (!basic_desc) {
704  av_log(s, AV_LOG_ERROR, "audio format not available\n");
705  return 1;
706  }
707 
708  stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
709  stream->codecpar->sample_rate = basic_desc->mSampleRate;
710  av_channel_layout_default(&stream->codecpar->ch_layout, basic_desc->mChannelsPerFrame);
711 
712  ctx->audio_channels = basic_desc->mChannelsPerFrame;
713  ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
714  ctx->audio_float = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
715  ctx->audio_be = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
716  ctx->audio_signed_integer = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
717  ctx->audio_packed = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
718  ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;
719 
720  if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
721  ctx->audio_float &&
722  ctx->audio_bits_per_sample == 32 &&
723  ctx->audio_packed) {
724  stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
725  } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
726  ctx->audio_signed_integer &&
727  ctx->audio_bits_per_sample == 16 &&
728  ctx->audio_packed) {
729  stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
730  } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
731  ctx->audio_signed_integer &&
732  ctx->audio_bits_per_sample == 24 &&
733  ctx->audio_packed) {
734  stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
735  } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
736  ctx->audio_signed_integer &&
737  ctx->audio_bits_per_sample == 32 &&
738  ctx->audio_packed) {
739  stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
740  } else {
742  av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
743  return 1;
744  }
745 
746  if (ctx->audio_non_interleaved) {
747  CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
748  ctx->audio_buffer_size = CMBlockBufferGetDataLength(block_buffer);
749  ctx->audio_buffer = av_malloc(ctx->audio_buffer_size);
750  if (!ctx->audio_buffer) {
752  av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
753  return 1;
754  }
755  }
756 
757  CFRelease(ctx->current_audio_frame);
758  ctx->current_audio_frame = nil;
759 
761 
762  return 0;
763 }
764 
/** Enumerate capture devices for the given media type.
 *
 *  On SDKs that provide it (iOS >= 10, macOS >= 10.15) an
 *  AVCaptureDeviceDiscoverySession with an explicit device-type list is
 *  used; each type is only added when the deployment target guarantees
 *  its availability.  Older SDKs fall back to the legacy
 *  +devicesWithMediaType:.  Returns nil for media types that are not
 *  video, audio or muxed (or when muxed is unsupported on this target). */
static NSArray* getDevicesWithMediaType(AVMediaType mediaType) {
#if ((TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000) || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101500))
    NSMutableArray *deviceTypes = nil;
    if (mediaType == AVMediaTypeVideo) {
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]];
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];
    #endif
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110100)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];
    #endif
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 130000)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTripleCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualWideCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInUltraWideCamera];
    #endif
    #if (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 130000)
        [deviceTypes addObject: AVCaptureDeviceTypeDeskViewCamera];
    #endif
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 150400)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInLiDARDepthCamera];
    #endif
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        [deviceTypes addObject: AVCaptureDeviceTypeContinuityCamera];
    #endif
    } else if (mediaType == AVMediaTypeAudio) {
    // AVCaptureDeviceTypeMicrophone replaced the BuiltInMicrophone type
    // in the iOS 17 / macOS 14 SDKs.
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeMicrophone]];
    #else
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInMicrophone]];
    #endif
    } else if (mediaType == AVMediaTypeMuxed) {
    // External (formerly ExternalUnknown) covers muxed capture hardware.
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternal]];
    #elif (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED < 140000)
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternalUnknown]];
    #else
        return nil;
    #endif
    } else {
        return nil;
    }

    AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession =
        [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:deviceTypes
                              mediaType:mediaType
                               position:AVCaptureDevicePositionUnspecified];
    return [captureDeviceDiscoverySession devices];
#else
    return [AVCaptureDevice devicesWithMediaType:mediaType];
#endif
}
819 
821 {
822  int ret = 0;
823  NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
824  uint32_t num_screens = 0;
825  AVFContext *ctx = (AVFContext*)s->priv_data;
826  AVCaptureDevice *video_device = nil;
827  AVCaptureDevice *audio_device = nil;
828  // Find capture device
829  NSArray *devices = getDevicesWithMediaType(AVMediaTypeVideo);
830  NSArray *devices_muxed = getDevicesWithMediaType(AVMediaTypeMuxed);
831 
832  ctx->num_video_devices = [devices count] + [devices_muxed count];
833 
834  pthread_mutex_init(&ctx->frame_lock, NULL);
835 
836 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
837  CGGetActiveDisplayList(0, NULL, &num_screens);
838 #endif
839 
840  // List devices if requested
841  if (ctx->list_devices) {
842  int index = 0;
843  av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
844  for (AVCaptureDevice *device in devices) {
845  const char *name = [[device localizedName] UTF8String];
846  index = [devices indexOfObject:device];
847  av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
848  }
849  for (AVCaptureDevice *device in devices_muxed) {
850  const char *name = [[device localizedName] UTF8String];
851  index = [devices count] + [devices_muxed indexOfObject:device];
852  av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
853  }
854 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
855  if (num_screens > 0) {
856  CGDirectDisplayID screens[num_screens];
857  CGGetActiveDisplayList(num_screens, screens, &num_screens);
858  for (int i = 0; i < num_screens; i++) {
859  av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
860  }
861  }
862 #endif
863 
864  av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
865  devices = getDevicesWithMediaType(AVMediaTypeAudio);
866  for (AVCaptureDevice *device in devices) {
867  const char *name = [[device localizedName] UTF8String];
868  int index = [devices indexOfObject:device];
869  av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
870  }
871  goto fail;
872  }
873 
874  // parse input filename for video and audio device
876  if (ret)
877  goto fail;
878 
879  // check for device index given in filename
880  if (ctx->video_device_index == -1 && ctx->video_filename) {
881  sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
882  }
883  if (ctx->audio_device_index == -1 && ctx->audio_filename) {
884  sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
885  }
886 
887  if (ctx->video_device_index >= 0) {
888  if (ctx->video_device_index < ctx->num_video_devices) {
889  if (ctx->video_device_index < [devices count]) {
890  video_device = [devices objectAtIndex:ctx->video_device_index];
891  } else {
892  video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
893  ctx->video_is_muxed = 1;
894  }
895  } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
896 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
897  CGDirectDisplayID screens[num_screens];
898  CGGetActiveDisplayList(num_screens, screens, &num_screens);
899  AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];
900 
901  if (ctx->framerate.num > 0) {
902  capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
903  }
904 
905 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
906  if (ctx->capture_cursor) {
907  capture_screen_input.capturesCursor = YES;
908  } else {
909  capture_screen_input.capturesCursor = NO;
910  }
911 #endif
912 
913  if (ctx->capture_mouse_clicks) {
914  capture_screen_input.capturesMouseClicks = YES;
915  } else {
916  capture_screen_input.capturesMouseClicks = NO;
917  }
918 
919  video_device = (AVCaptureDevice*) capture_screen_input;
920  ctx->video_is_screen = 1;
921 #endif
922  } else {
923  av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
924  goto fail;
925  }
926  } else if (ctx->video_filename &&
927  strncmp(ctx->video_filename, "none", 4)) {
928  if (!strncmp(ctx->video_filename, "default", 7)) {
929  video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
930  } else {
931  // looking for video inputs
932  for (AVCaptureDevice *device in devices) {
933  if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
934  video_device = device;
935  break;
936  }
937  }
938  // looking for muxed inputs
939  for (AVCaptureDevice *device in devices_muxed) {
940  if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
941  video_device = device;
942  ctx->video_is_muxed = 1;
943  break;
944  }
945  }
946 
947 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
948  // looking for screen inputs
949  if (!video_device) {
950  int idx;
951  if(sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
952  CGDirectDisplayID screens[num_screens];
953  CGGetActiveDisplayList(num_screens, screens, &num_screens);
954  AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
955  video_device = (AVCaptureDevice*) capture_screen_input;
956  ctx->video_device_index = ctx->num_video_devices + idx;
957  ctx->video_is_screen = 1;
958 
959  if (ctx->framerate.num > 0) {
960  capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
961  }
962 
963 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
964  if (ctx->capture_cursor) {
965  capture_screen_input.capturesCursor = YES;
966  } else {
967  capture_screen_input.capturesCursor = NO;
968  }
969 #endif
970 
971  if (ctx->capture_mouse_clicks) {
972  capture_screen_input.capturesMouseClicks = YES;
973  } else {
974  capture_screen_input.capturesMouseClicks = NO;
975  }
976  }
977  }
978 #endif
979  }
980 
981  if (!video_device) {
982  av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
983  goto fail;
984  }
985  }
986 
987  // get audio device
988  if (ctx->audio_device_index >= 0) {
989  NSArray *devices = getDevicesWithMediaType(AVMediaTypeAudio);
990 
991  if (ctx->audio_device_index >= [devices count]) {
992  av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
993  goto fail;
994  }
995 
996  audio_device = [devices objectAtIndex:ctx->audio_device_index];
997  } else if (ctx->audio_filename &&
998  strncmp(ctx->audio_filename, "none", 4)) {
999  if (!strncmp(ctx->audio_filename, "default", 7)) {
1000  audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
1001  } else {
1002  NSArray *devices = getDevicesWithMediaType(AVMediaTypeAudio);
1003 
1004  for (AVCaptureDevice *device in devices) {
1005  if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
1006  audio_device = device;
1007  break;
1008  }
1009  }
1010  }
1011 
1012  if (!audio_device) {
1013  av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
1014  goto fail;
1015  }
1016  }
1017 
1018  // Video nor Audio capture device not found, looking for AVMediaTypeVideo/Audio
1019  if (!video_device && !audio_device) {
1020  av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
1021  goto fail;
1022  }
1023 
1024  if (video_device) {
1025  if (ctx->video_device_index < ctx->num_video_devices) {
1026  av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
1027  } else {
1028  av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
1029  }
1030  }
1031  if (audio_device) {
1032  av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
1033  }
1034 
1035  // Initialize capture session
1036  ctx->capture_session = [[AVCaptureSession alloc] init];
1037 
1038  if (video_device && add_video_device(s, video_device)) {
1039  goto fail;
1040  }
1041  if (audio_device && add_audio_device(s, audio_device)) {
1042  }
1043 
1044  [ctx->capture_session startRunning];
1045 
1046  /* Unlock device configuration only after the session is started so it
1047  * does not reset the capture formats */
1048  if (!ctx->video_is_screen) {
1049  [video_device unlockForConfiguration];
1050  }
1051 
1052  if (video_device && get_video_config(s)) {
1053  goto fail;
1054  }
1055 
1056  // set audio stream
1057  if (audio_device && get_audio_config(s)) {
1058  goto fail;
1059  }
1060 
1061  [pool release];
1062  return 0;
1063 
1064 fail:
1065  [pool release];
1067  if (ret)
1068  return ret;
1069  return AVERROR(EIO);
1070 }
1071 
/*
 * Copy the contents of a CoreVideo pixel buffer into the payload of an
 * already-allocated AVPacket (pkt->data / pkt->size).
 * Returns 0 on success, AVERROR_EXTERNAL if the buffer cannot be locked,
 * or the status of the copy otherwise.
 *
 * NOTE(review): the opening line of this definition (presumably
 * "static int copy_cvpixelbuffer(AVFormatContext *s," per the symbol
 * index) is missing from this extract — confirm against the original file.
 */
1073  CVPixelBufferRef image_buffer,
1074  AVPacket *pkt)
1075 {
1076  AVFContext *ctx = s->priv_data;
1077  int src_linesize[4];
1078  const uint8_t *src_data[4];
1079  int width = CVPixelBufferGetWidth(image_buffer);
1080  int height = CVPixelBufferGetHeight(image_buffer);
1081  int status;
1082 
1083  memset(src_linesize, 0, sizeof(src_linesize));
1084  memset(src_data, 0, sizeof(src_data));
1085 
      /* The base address is only valid while the buffer is locked. */
1086  status = CVPixelBufferLockBaseAddress(image_buffer, 0);
1087  if (status != kCVReturnSuccess) {
1088  av_log(s, AV_LOG_ERROR, "Could not lock base address: %d (%dx%d)\n", status, width, height);
1089  return AVERROR_EXTERNAL;
1090  }
1091 
      /* Gather per-plane pointers/strides; non-planar formats use plane 0 only. */
1092  if (CVPixelBufferIsPlanar(image_buffer)) {
1093  size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
1094  int i;
1095  for(i = 0; i < plane_count; i++){
1096  src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
1097  src_data[i] = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
1098  }
1099  } else {
1100  src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
1101  src_data[0] = CVPixelBufferGetBaseAddress(image_buffer);
1102  }
1103 
      /* NOTE(review): a line is missing from this extract here — the arguments
       * below belong to a call whose opening line was dropped (presumably
       * "status = av_image_copy_to_buffer(pkt->data, pkt->size," per the
       * symbol index). Verify against the original source. */
1105  src_data, src_linesize,
1106  ctx->pixel_format, width, height, 1);
1107 
1108 
1109 
1110  CVPixelBufferUnlockBaseAddress(image_buffer, 0);
1111 
1112  return status;
1113 }
1114 
/*
 * Demuxer read_packet callback: hand the most recent captured video or
 * audio sample buffer to the caller as an AVPacket.
 * Loops until a packet is produced; returns AVERROR(EAGAIN) when no frame
 * is available yet, AVERROR_EOF after an observed session quit, or a
 * negative error code on failure.
 *
 * NOTE(review): the signature line (presumably
 * "static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)" per the
 * symbol index) is missing from this extract.
 */
1116 {
1117  AVFContext* ctx = (AVFContext*)s->priv_data;
1118 
1119  do {
1120  CVImageBufferRef image_buffer;
1121  CMBlockBufferRef block_buffer;
      /* Frames are delivered from the capture delegate on another thread;
       * everything touching ctx->current_frame / current_audio_frame must
       * hold the frame lock. */
1122  lock_frames(ctx);
1123 
      /* --- Video frame pending --- */
1124  if (ctx->current_frame != nil) {
1125  int status;
1126  int length = 0;
1127 
1128  image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
1129  block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);
1130 
      /* Raw/muxed capture arrives as a block buffer instead of a pixel buffer. */
1131  if (image_buffer != nil) {
1132  length = (int)CVPixelBufferGetDataSize(image_buffer);
1133  } else if (block_buffer != nil) {
1134  length = (int)CMBlockBufferGetDataLength(block_buffer);
1135  } else {
1136  unlock_frames(ctx);
1137  return AVERROR(EINVAL);
1138  }
1139 
1140  if (av_new_packet(pkt, length) < 0) {
1141  unlock_frames(ctx);
1142  return AVERROR(EIO);
1143  }
1144 
1145  CMItemCount count;
1146  CMSampleTimingInfo timing_info;
1147 
      /* Rescale the CoreMedia presentation timestamp into avf_time_base units. */
1148  if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
1149  AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
1150  pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
1151  }
1152 
1153  pkt->stream_index = ctx->video_stream_index;
      /* NOTE(review): a line is missing from this extract here (presumably
       * "pkt->flags |= AV_PKT_FLAG_KEY;" per the symbol index) — verify. */
1155 
1156  if (image_buffer) {
1157  status = copy_cvpixelbuffer(s, image_buffer, pkt);
1158  } else {
1159  status = 0;
1160  OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
1161  if (ret != kCMBlockBufferNoErr) {
1162  status = AVERROR(EIO);
1163  }
1164  }
      /* Consumed: release the sample buffer so the delegate can store a new one. */
1165  CFRelease(ctx->current_frame);
1166  ctx->current_frame = nil;
1167 
1168  if (status < 0) {
1169  unlock_frames(ctx);
1170  return status;
1171  }
      /* --- Audio frame pending --- */
1172  } else if (ctx->current_audio_frame != nil) {
1173  CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
1174  int block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
1175 
1176  if (!block_buffer || !block_buffer_size) {
1177  unlock_frames(ctx);
1178  return AVERROR(EIO);
1179  }
1180 
      /* Non-interleaved audio is staged in ctx->audio_buffer first; it must fit. */
1181  if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
1182  unlock_frames(ctx);
1183  return AVERROR_BUFFER_TOO_SMALL;
1184  }
1185 
1186  if (av_new_packet(pkt, block_buffer_size) < 0) {
1187  unlock_frames(ctx);
1188  return AVERROR(EIO);
1189  }
1190 
1191  CMItemCount count;
1192  CMSampleTimingInfo timing_info;
1193 
1194  if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
1195  AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
1196  pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
1197  }
1198 
1199  pkt->stream_index = ctx->audio_stream_index;
      /* NOTE(review): a line is missing from this extract here (presumably
       * "pkt->flags |= AV_PKT_FLAG_KEY;" per the symbol index) — verify. */
1201 
1202  if (ctx->audio_non_interleaved) {
1203  int sample, c, shift, num_samples;
1204 
      /* Copy planar samples into the staging buffer before interleaving. */
1205  OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
1206  if (ret != kCMBlockBufferNoErr) {
1207  unlock_frames(ctx);
1208  return AVERROR(EIO);
1209  }
1210 
1211  num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));
1212 
1213  // transform decoded frame into output format
      /* Interleave per-channel planes into pkt->data, shifting samples up to
       * the full container width (bps) when the device delivers fewer
       * significant bits. */
1214  #define INTERLEAVE_OUTPUT(bps) \
1215  { \
1216  int##bps##_t **src; \
1217  int##bps##_t *dest; \
1218  src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
1219  if (!src) { \
1220  unlock_frames(ctx); \
1221  return AVERROR(EIO); \
1222  } \
1223  \
1224  for (c = 0; c < ctx->audio_channels; c++) { \
1225  src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
1226  } \
1227  dest = (int##bps##_t*)pkt->data; \
1228  shift = bps - ctx->audio_bits_per_sample; \
1229  for (sample = 0; sample < num_samples; sample++) \
1230  for (c = 0; c < ctx->audio_channels; c++) \
1231  *dest++ = src[c][sample] << shift; \
1232  av_freep(&src); \
1233  }
1234 
1235  if (ctx->audio_bits_per_sample <= 16) {
1236  INTERLEAVE_OUTPUT(16)
1237  } else {
1238  INTERLEAVE_OUTPUT(32)
1239  }
1240  } else {
      /* Interleaved audio: copy straight into the packet. */
1241  OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
1242  if (ret != kCMBlockBufferNoErr) {
1243  unlock_frames(ctx);
1244  return AVERROR(EIO);
1245  }
1246  }
1247 
1248  CFRelease(ctx->current_audio_frame);
1249  ctx->current_audio_frame = nil;
      /* --- No frame available --- */
1250  } else {
1251  pkt->data = NULL;
1252  unlock_frames(ctx);
1253  if (ctx->observed_quit) {
1254  return AVERROR_EOF;
1255  } else {
1256  return AVERROR(EAGAIN);
1257  }
1258  }
1259 
1260  unlock_frames(ctx);
1261  } while (!pkt->data);
1262 
1263  return 0;
1264 }
1265 
/*
 * Demuxer read_close callback: tear down the capture context.
 *
 * NOTE(review): the signature line (presumably
 * "static int avf_close(AVFormatContext *s)" per the symbol index) is
 * missing from this extract, and a line appears to be missing between the
 * two statements below (presumably "destroy_context(ctx);" — ctx is
 * otherwise unused here). Verify against the original source.
 */
1267 {
1268  AVFContext* ctx = (AVFContext*)s->priv_data;
1270  return 0;
1271 }
1272 
/*
 * User-settable options for the AVFoundation input device. All entries are
 * AV_OPT_FLAG_DECODING_PARAM (configured by the user before capture starts);
 * device indices default to -1, meaning "select by name/default instead".
 */
1273 static const AVOption options[] = {
1274  { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
1275  { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
1276  { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
1277  { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM},
1278  { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
1279  { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
1280  { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
1281  { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
1282  { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
1283  { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
1284 
1285  { NULL },
1286 };
1287 
/*
 * AVClass descriptor binding the option table above to the demuxer's
 * private context for av_opt / logging purposes.
 *
 * NOTE(review): one designated initializer line is missing from this
 * extract between .version and the closing brace (likely
 * ".category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT," per the symbol
 * index). Verify against the original source.
 */
1288 static const AVClass avf_class = {
1289  .class_name = "AVFoundation indev",
1290  .item_name = av_default_item_name,
1291  .option = options,
1292  .version = LIBAVUTIL_VERSION_INT,
1294 };
1295 
/*
 * Input-format registration for the AVFoundation capture device.
 *
 * NOTE(review): this extract is missing the struct's opening line
 * (presumably "const FFInputFormat ff_avfoundation_demuxer = {" per the
 * symbol index) and two initializer lines before .read_close (presumably
 * ".read_header = avf_read_header," and ".read_packet = avf_read_packet,").
 * Verify against the original source.
 */
1297  .p.name = "avfoundation",
1298  .p.long_name = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
1299  .p.flags = AVFMT_NOFILE,
1300  .p.priv_class = &avf_class,
1301  .priv_data_size = sizeof(AVFContext),
1304  .read_close = avf_close,
1305 };
error
static void error(const char *err)
Definition: target_bsf_fuzzer.c:31
AV_CODEC_ID_PCM_S16LE
@ AV_CODEC_ID_PCM_S16LE
Definition: codec_id.h:328
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AV_CODEC_ID_PCM_F32BE
@ AV_CODEC_ID_PCM_F32BE
Definition: codec_id.h:348
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVFContext::audio_buffer_size
int audio_buffer_size
Definition: avfoundation.m:125
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
AVFContext::audio_float
int audio_float
Definition: avfoundation.m:118
AVFContext::observed_quit
int observed_quit
Definition: avfoundation.m:139
unlock_frames
static void unlock_frames(AVFContext *ctx)
Definition: avfoundation.m:147
avformat_new_stream
AVStream * avformat_new_stream(AVFormatContext *s, const struct AVCodec *c)
Add a new stream to a media file.
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AV_OPT_TYPE_VIDEO_RATE
@ AV_OPT_TYPE_VIDEO_RATE
offset must point to AVRational
Definition: opt.h:248
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
AV_CODEC_ID_RAWVIDEO
@ AV_CODEC_ID_RAWVIDEO
Definition: codec_id.h:65
AVFContext::current_audio_frame
CMSampleBufferRef current_audio_frame
Definition: avfoundation.m:133
pixdesc.h
AVFContext::audio_frames_captured
int audio_frames_captured
Definition: avfoundation.m:89
AVPacket::data
uint8_t * data
Definition: packet.h:522
AVOption
AVOption.
Definition: opt.h:346
AV_PIX_FMT_BGR24
@ AV_PIX_FMT_BGR24
packed RGB 8:8:8, 24bpp, BGRBGR...
Definition: pixfmt.h:76
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:577
parse_device_name
static int parse_device_name(AVFormatContext *s)
Definition: avfoundation.m:314
AV_PIX_FMT_RGB555BE
@ AV_PIX_FMT_RGB555BE
packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined
Definition: pixfmt.h:114
AVFContext::audio_channels
int audio_channels
Definition: avfoundation.m:116
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:30
AVFContext::video_filename
char * video_filename
Definition: avfoundation.m:111
AVFPixelFormatSpec::avf_id
OSType avf_id
Definition: avfoundation.m:52
AVFContext::audio_be
int audio_be
Definition: avfoundation.m:119
AVFContext::capture_cursor
int capture_cursor
Definition: avfoundation.m:97
avpriv_set_pts_info
void avpriv_set_pts_info(AVStream *st, int pts_wrap_bits, unsigned int pts_num, unsigned int pts_den)
Set the time base and wrapping info for a given stream.
Definition: avformat.c:853
AV_CODEC_ID_PCM_S16BE
@ AV_CODEC_ID_PCM_S16BE
Definition: codec_id.h:329
fail
#define fail()
Definition: checkasm.h:179
avf_close
static int avf_close(AVFormatContext *s)
Definition: avfoundation.m:1266
avf_time_base
static const int avf_time_base
Definition: avfoundation.m:43
read_close
static av_cold int read_close(AVFormatContext *ctx)
Definition: libcdio.c:143
AVFContext::current_frame
CMSampleBufferRef current_frame
Definition: avfoundation.m:132
AVFPixelFormatSpec::ff_id
enum AVPixelFormat ff_id
Definition: avfoundation.m:51
AVFContext::observed_device
AVCaptureDevice * observed_device
Definition: avfoundation.m:135
AVERROR_BUFFER_TOO_SMALL
#define AVERROR_BUFFER_TOO_SMALL
Buffer too small.
Definition: error.h:53
AVRational::num
int num
Numerator.
Definition: rational.h:59
AVFContext::framerate
AVRational framerate
Definition: avfoundation.m:94
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:481
description
Tag description
Definition: snow.txt:206
pkt
AVPacket * pkt
Definition: movenc.c:59
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
avf_time_base_q
static const AVRational avf_time_base_q
Definition: avfoundation.m:45
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:490
read_packet
static int read_packet(void *opaque, uint8_t *buf, int buf_size)
Definition: avio_read_callback.c:41
AVFContext::num_video_devices
int num_video_devices
Definition: avfoundation.m:114
INTERLEAVE_OUTPUT
#define INTERLEAVE_OUTPUT(bps)
width
#define width
getDevicesWithMediaType
static NSArray * getDevicesWithMediaType(AVMediaType mediaType)
Definition: avfoundation.m:765
s
#define s(width, name)
Definition: cbs_vp9.c:198
av_new_packet
int av_new_packet(AVPacket *pkt, int size)
Allocate the payload of a packet and initialize its fields with default values.
Definition: avpacket.c:98
format
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate. The lists are not just lists
AVInputFormat::name
const char * name
A comma separated list of short names for the format.
Definition: avformat.h:553
AVFAudioReceiver::_context
AVFContext * _context
Definition: avfoundation.m:248
options
static const AVOption options[]
Definition: avfoundation.m:1273
add_audio_device
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
Definition: avfoundation.m:586
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:202
av_q2d
static double av_q2d(AVRational a)
Convert an AVRational to a double.
Definition: rational.h:104
av_strtok
char * av_strtok(char *s, const char *delim, char **saveptr)
Split the string into several tokens which can be accessed by successive calls to av_strtok().
Definition: avstring.c:178
AVFContext::capture_mouse_clicks
int capture_mouse_clicks
Definition: avfoundation.m:98
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AVFContext::frame_lock
pthread_mutex_t frame_lock
Definition: avfoundation.m:90
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
AVFContext::capture_raw_data
int capture_raw_data
Definition: avfoundation.m:99
AVFContext::list_devices
int list_devices
Definition: avfoundation.m:104
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
AVFPixelFormatSpec
Definition: avfoundation.m:50
get_video_config
static int get_video_config(AVFormatContext *s)
Definition: avfoundation.m:630
if
if(ret)
Definition: filter_design.txt:179
AVFContext::audio_packed
int audio_packed
Definition: avfoundation.m:121
AVFFrameReceiver::_context
AVFContext * _context
Definition: avfoundation.m:156
context
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option keep it simple and lowercase description are in without and describe what they for example set the foo of the bar offset is the offset of the field in your context
Definition: writing_filters.txt:91
AVFormatContext
Format I/O context.
Definition: avformat.h:1255
internal.h
AVFContext::video_output
AVCaptureVideoDataOutput * video_output
Definition: avfoundation.m:130
framerate
float framerate
Definition: av1_levels.c:29
AVFContext::audio_signed_integer
int audio_signed_integer
Definition: avfoundation.m:120
AV_PIX_FMT_RGB565LE
@ AV_PIX_FMT_RGB565LE
packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
Definition: pixfmt.h:113
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
read_header
static int read_header(FFV1Context *f)
Definition: ffv1dec.c:550
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
fabs
static __device__ float fabs(float a)
Definition: cuda_runtime.h:182
NULL
#define NULL
Definition: coverity.c:32
AVFContext::drop_late_frames
int drop_late_frames
Definition: avfoundation.m:100
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
add_video_device
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
Definition: avfoundation.m:439
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVFFrameReceiver
FrameReciever class - delegate for AVCaptureSession.
Definition: avfoundation.m:154
ff_avfoundation_demuxer
const FFInputFormat ff_avfoundation_demuxer
Definition: avfoundation.m:1296
AV_PIX_FMT_MONOBLACK
@ AV_PIX_FMT_MONOBLACK
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb.
Definition: pixfmt.h:83
AV_OPT_TYPE_IMAGE_SIZE
@ AV_OPT_TYPE_IMAGE_SIZE
offset must point to two consecutive integers
Definition: opt.h:245
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:237
parseutils.h
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:265
time.h
AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT
@ AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT
Definition: log.h:41
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:479
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
avf_read_packet
static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
Definition: avfoundation.m:1115
AVFContext::width
int width
Definition: avfoundation.m:95
configure_video_device
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
Configure the video device.
Definition: avfoundation.m:342
index
int index
Definition: gxfenc.c:89
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:81
AVFContext::audio_buffer
int32_t * audio_buffer
Definition: avfoundation.m:124
AVFContext::video_stream_index
int video_stream_index
Definition: avfoundation.m:106
AV_CODEC_ID_PCM_S24LE
@ AV_CODEC_ID_PCM_S24LE
Definition: codec_id.h:340
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:365
AV_PIX_FMT_RGB24
@ AV_PIX_FMT_RGB24
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:75
AVMediaType
AVMediaType
Definition: avutil.h:199
AVPacket::size
int size
Definition: packet.h:523
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:106
destroy_context
static void destroy_context(AVFContext *ctx)
Definition: avfoundation.m:288
shift
static int shift(int a, int b)
Definition: bonk.c:262
AVFContext::url
char * url
Definition: avfoundation.m:110
AVFormatContext::url
char * url
input or output URL.
Definition: avformat.h:1371
sample
#define sample
Definition: flacdsp_template.c:44
av_make_q
static AVRational av_make_q(int num, int den)
Create an AVRational.
Definition: rational.h:71
AVFMT_NOFILE
#define AVFMT_NOFILE
Demuxer will use avio_open, no opened file should be provided by the caller.
Definition: avformat.h:468
AVFContext::audio_non_interleaved
int audio_non_interleaved
Definition: avfoundation.m:122
range
enum AVColorRange range
Definition: mediacodec_wrapper.c:2646
avdevice.h
FFInputFormat::p
AVInputFormat p
The public AVInputFormat.
Definition: demux.h:35
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:521
height
#define height
AV_PIX_FMT_YUVA444P
@ AV_PIX_FMT_YUVA444P
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
Definition: pixfmt.h:174
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:528
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:263
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:191
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
av_channel_layout_default
void av_channel_layout_default(AVChannelLayout *ch_layout, int nb_channels)
Get the default channel layout for a given number of channels.
Definition: channel_layout.c:830
lock_frames
static void lock_frames(AVFContext *ctx)
Definition: avfoundation.m:142
AVFContext::audio_stream_index
int audio_stream_index
Definition: avfoundation.m:108
copy_cvpixelbuffer
static int copy_cvpixelbuffer(AVFormatContext *s, CVPixelBufferRef image_buffer, AVPacket *pkt)
Definition: avfoundation.m:1072
AV_PIX_FMT_RGB555LE
@ AV_PIX_FMT_RGB555LE
packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined
Definition: pixfmt.h:115
AVFContext::audio_bits_per_sample
int audio_bits_per_sample
Definition: avfoundation.m:117
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:255
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:515
avf_read_header
static int avf_read_header(AVFormatContext *s)
Definition: avfoundation.m:820
internal.h
AV_CODEC_ID_DVVIDEO
@ AV_CODEC_ID_DVVIDEO
Definition: codec_id.h:76
AV_CODEC_ID_PCM_S32BE
@ AV_CODEC_ID_PCM_S32BE
Definition: codec_id.h:337
demux.h
AVFContext::frames_captured
int frames_captured
Definition: avfoundation.m:88
AVFContext::video_is_muxed
int video_is_muxed
Definition: avfoundation.m:101
ret
ret
Definition: filter_design.txt:187
AVStream
Stream structure.
Definition: avformat.h:743
AV_PIX_FMT_0BGR
@ AV_PIX_FMT_0BGR
packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined
Definition: pixfmt.h:264
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
AVFContext::audio_device_index
int audio_device_index
Definition: avfoundation.m:107
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:71
avf_pixel_formats
static const struct AVFPixelFormatSpec avf_pixel_formats[]
Definition: avfoundation.m:55
AVFContext::audio_output
AVCaptureAudioDataOutput * audio_output
Definition: avfoundation.m:131
id
enum AVCodecID id
Definition: dts2pts.c:364
AV_PIX_FMT_UYVY422
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:88
status
ov_status_e status
Definition: dnn_backend_openvino.c:120
AVFContext::avf_audio_delegate
id avf_audio_delegate
Definition: avfoundation.m:92
channel_layout.h
AVFContext::video_is_screen
int video_is_screen
Definition: avfoundation.m:102
mode
mode
Definition: ebur128.h:83
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:235
AV_OPT_TYPE_PIXEL_FMT
@ AV_OPT_TYPE_PIXEL_FMT
Definition: opt.h:246
AVPacket::stream_index
int stream_index
Definition: packet.h:524
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
AVFContext::audio_filename
char * audio_filename
Definition: avfoundation.m:112
AV_PIX_FMT_RGB565BE
@ AV_PIX_FMT_RGB565BE
packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
Definition: pixfmt.h:112
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:270
AV_OPT_FLAG_DECODING_PARAM
#define AV_OPT_FLAG_DECODING_PARAM
A generic parameter which can be set by the user for demuxing or decoding.
Definition: opt.h:273
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_CODEC_ID_PCM_S32LE
@ AV_CODEC_ID_PCM_S32LE
Definition: codec_id.h:336
get_audio_config
static int get_audio_config(AVFormatContext *s)
Definition: avfoundation.m:678
AVFContext
Definition: avfoundation.m:84
timing_info
static int FUNC() timing_info(CodedBitstreamContext *ctx, RWContext *rw, AV1RawTimingInfo *current)
Definition: cbs_av1_syntax_template.c:158
av_image_copy_to_buffer
int av_image_copy_to_buffer(uint8_t *dst, int dst_size, const uint8_t *const src_data[4], const int src_linesize[4], enum AVPixelFormat pix_fmt, int width, int height, int align)
Copy image data from an image into a buffer.
Definition: imgutils.c:501
AVPacket
This structure stores compressed data.
Definition: packet.h:499
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:251
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
FFInputFormat
Definition: demux.h:31
int32_t
int32_t
Definition: audioconvert.c:56
imgutils.h
AVFContext::video_device_index
int video_device_index
Definition: avfoundation.m:105
AV_PIX_FMT_0RGB
@ AV_PIX_FMT_0RGB
packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined
Definition: pixfmt.h:262
AV_CODEC_ID_PCM_F32LE
@ AV_CODEC_ID_PCM_F32LE
Definition: codec_id.h:349
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVFAudioReceiver
AudioReciever class - delegate for AVCaptureSession.
Definition: avfoundation.m:246
avstring.h
AVFContext::avf_delegate
id avf_delegate
Definition: avfoundation.m:91
AV_PIX_FMT_YUVA444P16LE
@ AV_PIX_FMT_YUVA444P16LE
planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
Definition: pixfmt.h:192
avf_class
static const AVClass avf_class
Definition: avfoundation.m:1288
int
int
Definition: ffmpeg_filter.c:425
AVFContext::capture_session
AVCaptureSession * capture_session
Definition: avfoundation.m:129
AV_CODEC_ID_PCM_S24BE
@ AV_CODEC_ID_PCM_S24BE
Definition: codec_id.h:341
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2882
AV_PIX_FMT_BGR48BE
@ AV_PIX_FMT_BGR48BE
packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:145
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:77