avfoundation_dec.m
/*
 * AVFoundation input device
 * Copyright (c) 2015 Luca Barbato
 *                    Alexandre Lision
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#import <AVFoundation/AVFoundation.h>
#include <pthread.h>

#include "libavformat/avformat.h"
#include "libavformat/internal.h"

#include "libavutil/log.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"

#include "avdevice.h"

struct AVPixelFormatMap {
    enum AVPixelFormat pix_fmt;
    OSType core_video_fmt;
};

static const struct AVPixelFormatMap pixel_format_map[] = {
    { AV_PIX_FMT_ABGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_ARGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_BGRA,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGBA,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};

static enum AVPixelFormat core_video_to_pix_fmt(OSType core_video_fmt)
{
    int i;
    for (i = 0; pixel_format_map[i].pix_fmt != AV_PIX_FMT_NONE; i++)
        if (core_video_fmt == pixel_format_map[i].core_video_fmt)
            return pixel_format_map[i].pix_fmt;
    return AV_PIX_FMT_NONE;
}

static OSType pix_fmt_to_core_video(enum AVPixelFormat pix_fmt)
{
    int i;
    for (i = 0; pixel_format_map[i].pix_fmt != AV_PIX_FMT_NONE; i++)
        if (pix_fmt == pixel_format_map[i].pix_fmt)
            return pixel_format_map[i].core_video_fmt;
    return 0;
}

typedef struct AVFoundationCaptureContext {
    AVClass *class;
    /* AVOptions */
    int list_devices;
    int list_formats;
    char *pixel_format;
    char *video_size;           /* String describing video size */
    char *framerate;            /* String describing the framerate */

    int width, height;
    AVRational internal_framerate;
    int video_stream_index;

    int64_t first_pts;
    int frames_captured;
    pthread_mutex_t frame_lock;
    pthread_cond_t frame_wait_cond;

    /* ARC-compatible pointers to ObjC objects */
    CFTypeRef session;          /* AVCaptureSession */
    CFTypeRef video_output;
    CFTypeRef video_delegate;
    CVImageBufferRef current_frame;
} AVFoundationCaptureContext;

#define AUDIO_DEVICES 1
#define VIDEO_DEVICES 2
#define ALL_DEVICES   AUDIO_DEVICES | VIDEO_DEVICES

#define OFFSET(x) offsetof(AVFoundationCaptureContext, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM
static const AVOption options[] = {
    { "list_devices", "List available devices and exit", OFFSET(list_devices), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, DEC, "list_devices" },
    { "all", "Show all the supported devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = ALL_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "audio", "Show only the audio devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = AUDIO_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "video", "Show only the video devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = VIDEO_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "list_formats", "List available formats and exit", OFFSET(list_formats), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, DEC, "list_formats" },
    { "pixel_format", "Preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { "framerate", "A string representing desired framerate", OFFSET(framerate), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { NULL },
};
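
/*
 * Illustrative usage sketch (an assumption, not taken from this file): the
 * options above are private demuxer options, so they would typically be set
 * on the command line before the input. Device and file names are examples.
 *
 *   avconv -f avfoundation -list_devices all -i ""
 *   avconv -f avfoundation -video_size 1280x720 -framerate 30 \
 *          -pixel_format uyvy422 -i default out.nut
 */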

static void list_capture_devices_by_type(AVFormatContext *s, NSString *type)
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:type];

    av_log(s, AV_LOG_INFO, "Type: %s\n", [type UTF8String]);
    for (AVCaptureDevice *device in devices) {
        av_log(s, AV_LOG_INFO, "uniqueID: %s\nname: %s\nformat:\n",
               [[device uniqueID] UTF8String],
               [[device localizedName] UTF8String]);

        for (AVCaptureDeviceFormat *format in device.formats)
            av_log(s, AV_LOG_INFO, "\t%s\n",
                   [[NSString stringWithFormat:@"%@", format] UTF8String]);
    }
}

static int avfoundation_list_capture_devices(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;

    if (ctx->list_devices & AUDIO_DEVICES)
        list_capture_devices_by_type(s, AVMediaTypeAudio);

    if (ctx->list_devices & VIDEO_DEVICES)
        list_capture_devices_by_type(s, AVMediaTypeVideo);

    return AVERROR_EXIT;
}

static int list_formats(AVFormatContext *s)
{
    av_log(s, AV_LOG_VERBOSE, "Supported pixel formats (first is more efficient):\n");
    AVCaptureVideoDataOutput *out = [[AVCaptureVideoDataOutput alloc] init];

    for (NSNumber *cv_pixel_format in [out availableVideoCVPixelFormatTypes]) {
        OSType cv_fmt              = [cv_pixel_format intValue];
        enum AVPixelFormat pix_fmt = core_video_to_pix_fmt(cv_fmt);
        if (pix_fmt != AV_PIX_FMT_NONE) {
            av_log(s, AV_LOG_VERBOSE, " %s: %d\n",
                   av_get_pix_fmt_name(pix_fmt),
                   cv_fmt);
        }
    }
    return AVERROR_EXIT;
}

static void lock_frames(AVFoundationCaptureContext *ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(AVFoundationCaptureContext *ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}

@interface VideoCapture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVFoundationCaptureContext *_context;
}

- (id)initWithContext:(AVFoundationCaptureContext *)context;

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
    fromConnection:(AVCaptureConnection *)connection;

@end

@implementation VideoCapture

- (id)initWithContext:(AVFoundationCaptureContext *)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
    fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef buf;
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    buf = CMSampleBufferGetImageBuffer(videoFrame);
    if (!buf)
        return;

    CFRetain(buf);

    _context->current_frame = buf;

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end

/**
 * Configure the video device.
 */
static bool configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFoundationCaptureContext *ctx       = s->priv_data;
    AVCaptureDeviceFormat *selected_format = nil;
    AVFrameRateRange *selected_range       = nil;
    double framerate = av_q2d(ctx->internal_framerate);
    double epsilon   = 0.00000001;

    for (AVCaptureDeviceFormat *format in [video_device formats]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef)format.formatDescription;
        dimensions        = CMVideoFormatDescriptionGetDimensions(formatDescription);

        if ((ctx->width == 0 && ctx->height == 0) ||
            (dimensions.width == ctx->width && dimensions.height == ctx->height)) {
            av_log(s, AV_LOG_VERBOSE, "Trying video size %dx%d\n",
                   dimensions.width, dimensions.height);
            ctx->width      = dimensions.width;
            ctx->height     = dimensions.height;
            selected_format = format;
            if (framerate) {
                av_log(s, AV_LOG_VERBOSE, "Checking support for framerate %f\n",
                       framerate);
                for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
                    if (range.minFrameRate <= (framerate + epsilon) &&
                        range.maxFrameRate >= (framerate - epsilon)) {
                        selected_range = range;
                        break;
                    }
            } else {
                selected_range = format.videoSupportedFrameRateRanges[0];
                framerate      = selected_range.maxFrameRate;
                break;
            }

            if (selected_format && selected_range)
                break;
        }
    }

    if (!selected_format) {
        av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device\n",
               ctx->width, ctx->height);
        return false;
    } else {
        av_log(s, AV_LOG_VERBOSE, "Setting video size to %dx%d\n",
               ctx->width, ctx->height);
    }

    if (framerate && !selected_range) {
        av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device\n",
               framerate);
        return false;
    } else {
        av_log(s, AV_LOG_VERBOSE, "Setting framerate to %f\n",
               framerate);
    }

    if ([video_device lockForConfiguration:NULL] == YES) {
        [video_device setActiveFormat:selected_format];
        [video_device setActiveVideoMinFrameDuration:CMTimeMake(1, framerate)];
        [video_device setActiveVideoMaxFrameDuration:CMTimeMake(1, framerate)];
    } else {
        av_log(s, AV_LOG_ERROR, "Could not lock device for configuration\n");
        return false;
    }
    return true;
}

static void print_supported_formats(AVFormatContext *s, AVCaptureDevice *device)
{
    av_log(s, AV_LOG_WARNING, "Supported modes:\n");
    for (AVCaptureDeviceFormat *format in [device formats]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef)format.formatDescription;
        dimensions        = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
            av_log(s, AV_LOG_WARNING, " %dx%d@[%f %f]fps\n",
                   dimensions.width, dimensions.height,
                   range.minFrameRate, range.maxFrameRate);
    }
}

static int setup_stream(AVFormatContext *s, AVCaptureDevice *device)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    NSError *__autoreleasing error  = nil;
    AVCaptureDeviceInput *input;
    AVCaptureSession *session = (__bridge AVCaptureSession *)ctx->session;

    av_log(s, AV_LOG_VERBOSE, "Setting up stream for device %s\n", [[device uniqueID] UTF8String]);

    if (!configure_video_device(s, device)) {
        av_log(s, AV_LOG_ERROR, "device configuration failed\n");
        print_supported_formats(s, device);
        return AVERROR(EINVAL);
    }

    // add the input devices
    input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                  error:&error];
    if (!input) {
        av_log(s, AV_LOG_ERROR, "%s\n",
               [[error localizedDescription] UTF8String]);
        return AVERROR_UNKNOWN;
    }

    if ([session canAddInput:input]) {
        [session addInput:input];
    } else {
        av_log(s, AV_LOG_ERROR, "Cannot add video input to capture session\n");
        return AVERROR(EINVAL);
    }

    // add the output devices
    if ([device hasMediaType:AVMediaTypeVideo]) {
        AVCaptureVideoDataOutput *out = [[AVCaptureVideoDataOutput alloc] init];
        NSNumber *core_video_fmt      = nil;
        if (!out) {
            av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
            return AVERROR(EINVAL);
        }

        [out setAlwaysDiscardsLateVideoFrames:YES];

        if (ctx->pixel_format) {
            // Try to use specified pixel format
            core_video_fmt = [NSNumber numberWithInt:pix_fmt_to_core_video(av_get_pix_fmt(ctx->pixel_format))];
            if ([[out availableVideoCVPixelFormatTypes] indexOfObject:core_video_fmt] != NSNotFound) {
                av_log(s, AV_LOG_VERBOSE, "Pixel format %s supported!\n", ctx->pixel_format);
            } else {
                core_video_fmt = nil;
            }
        }

        if (!ctx->pixel_format || !core_video_fmt) {
            av_log(s, AV_LOG_VERBOSE, "Pixel format not supported or not provided, overriding...\n");
            for (NSNumber *cv_pixel_format in [out availableVideoCVPixelFormatTypes]) {
                OSType cv_fmt              = [cv_pixel_format intValue];
                enum AVPixelFormat pix_fmt = core_video_to_pix_fmt(cv_fmt);
                // Use the first one in the list, it will be the most effective
                if (pix_fmt != AV_PIX_FMT_NONE) {
                    core_video_fmt    = cv_pixel_format;
                    ctx->pixel_format = av_strdup(av_get_pix_fmt_name(pix_fmt));
                    break;
                }
            }
        }

        // fail if there is no appropriate pixel format
        if (!core_video_fmt) {
            return AVERROR(EINVAL);
        } else {
            av_log(s, AV_LOG_VERBOSE, "Using %s.\n",
                   ctx->pixel_format);
        }

        NSDictionary *capture_dict = [NSDictionary dictionaryWithObject:core_video_fmt
                                                                  forKey:(const NSString *)kCVPixelBufferPixelFormatTypeKey];
        [out setVideoSettings:capture_dict];

        VideoCapture *delegate = [[VideoCapture alloc] initWithContext:ctx];

        dispatch_queue_t queue = dispatch_queue_create("avf_queue", NULL);
        [out setSampleBufferDelegate:delegate queue:queue];

        if ([session canAddOutput:out]) {
            [session addOutput:out];
            ctx->video_output   = (__bridge_retained CFTypeRef)out;
            ctx->video_delegate = (__bridge_retained CFTypeRef)delegate;
        } else {
            av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
            return AVERROR(EINVAL);
        }
    }

    return 0;
}

static int get_video_config(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    CVImageBufferRef image_buffer;
    CGSize image_buffer_size;
    AVStream *stream = avformat_new_stream(s, NULL);

    if (!stream) {
        av_log(s, AV_LOG_ERROR, "Failed to create AVStream\n");
        return AVERROR(EINVAL);
    }

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1)
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, 1000000);

    image_buffer      = ctx->current_frame;
    image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)image_buffer_size.width;
    stream->codec->height     = (int)image_buffer_size.height;
    stream->codec->pix_fmt    = av_get_pix_fmt(ctx->pixel_format);

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}

static void destroy_context(AVFoundationCaptureContext *ctx)
{
    AVCaptureSession *session = (__bridge AVCaptureSession *)ctx->session;
    [session stopRunning];

    ctx->session = NULL;

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
    }
}

static int setup_default_stream(AVFormatContext *s)
{
    AVCaptureDevice *device;
    for (NSString *type in @[AVMediaTypeVideo]) {
        device = [AVCaptureDevice defaultDeviceWithMediaType:type];
        if (device) {
            av_log(s, AV_LOG_VERBOSE, "Using default device %s\n",
                   [[device uniqueID] UTF8String]);
            return setup_stream(s, device);
        }
    }
    return AVERROR(EINVAL);
}

/**
 * Try to open device given in filename
 * Two supported formats: "device_unique_id" or "[device_unique_id]"
 */
static AVCaptureDevice *create_device(AVFormatContext *s)
{
    NSString *filename;
    NSError *__autoreleasing error = nil;
    NSRegularExpression *exp;
    NSArray *matches;
    AVCaptureDevice *device;

    filename = [NSString stringWithFormat:@"%s", s->filename];

    if ((device = [AVCaptureDevice deviceWithUniqueID:filename])) {
        av_log(s, AV_LOG_VERBOSE, "Device with name %s found\n", [filename UTF8String]);
        return device;
    }

    // Remove '[]' from the device name
    NSString *pat = @"(?<=\\[).*?(?=\\])";
    exp = [NSRegularExpression regularExpressionWithPattern:pat
                                                    options:0
                                                      error:&error];
    if (!exp) {
        av_log(s, AV_LOG_ERROR, "%s\n",
               [[error localizedDescription] UTF8String]);
        return NULL;
    }

    matches = [exp matchesInString:filename options:0
                             range:NSMakeRange(0, [filename length])];

    if (matches.count > 0) {
        for (NSTextCheckingResult *match in matches) {
            NSRange range      = [match rangeAtIndex:0];
            NSString *uniqueID = [filename substringWithRange:NSMakeRange(range.location, range.length)];
            av_log(s, AV_LOG_VERBOSE, "opening device with ID: %s\n", [uniqueID UTF8String]);
            if (!(device = [AVCaptureDevice deviceWithUniqueID:uniqueID])) {
                av_log(s, AV_LOG_ERROR, "Device with name %s not found\n", [filename UTF8String]);
                return NULL;
            }
            return device;
        }
    }
    return NULL;
}

static int setup_streams(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    int ret;
    AVCaptureDevice *device;

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

    ctx->session = (__bridge_retained CFTypeRef)[[AVCaptureSession alloc] init];

    if (!strncmp(s->filename, "default", 7)) {
        ret = setup_default_stream(s);
    } else {
        device = create_device(s);
        if (device) {
            ret = setup_stream(s, device);
        } else {
            av_log(s, AV_LOG_ERROR, "No matches for %s\n", s->filename);
            ret = setup_default_stream(s);
        }
    }

    if (ret < 0) {
        av_log(s, AV_LOG_ERROR, "No device could be added\n");
        return ret;
    }

    av_log(s, AV_LOG_VERBOSE, "Starting session!\n");
    [(__bridge AVCaptureSession *)ctx->session startRunning];

    // Session is started, unlock device
    [device unlockForConfiguration];

    av_log(s, AV_LOG_VERBOSE, "Checking video config\n");
    if (get_video_config(s)) {
        destroy_context(ctx);
        return AVERROR(EIO);
    }

    return 0;
}

static int avfoundation_read_header(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    ctx->first_pts = av_gettime();

    AVRational framerate_q  = { 0, 1 };
    ctx->internal_framerate = framerate_q;

    if (ctx->list_devices)
        return avfoundation_list_capture_devices(s);
    if (ctx->list_formats) {
        return list_formats(s);
    }

    if (ctx->pixel_format) {
        if (av_get_pix_fmt(ctx->pixel_format) == AV_PIX_FMT_NONE) {
            av_log(s, AV_LOG_ERROR, "No such input format: %s.\n",
                   ctx->pixel_format);
            return AVERROR(EINVAL);
        }
    }

    if (ctx->video_size &&
        (av_parse_video_size(&ctx->width, &ctx->height, ctx->video_size)) < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse video size '%s'.\n",
               ctx->video_size);
        return AVERROR(EINVAL);
    }

    if (ctx->framerate &&
        (av_parse_video_rate(&ctx->internal_framerate, ctx->framerate)) < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               ctx->framerate);
        return AVERROR(EINVAL);
    }

    return setup_streams(s);
}

static int avfoundation_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFoundationCaptureContext *ctx = s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) {
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts,
                                               AV_TIME_BASE_Q,
                                               (AVRational) { 1, 1000000 });
            pkt->stream_index = ctx->video_stream_index;
            pkt->flags       |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(ctx->current_frame, 0);

            void *data = CVPixelBufferGetBaseAddress(ctx->current_frame);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else {
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}

static int avfoundation_read_close(AVFormatContext *s)
{
    av_log(s, AV_LOG_VERBOSE, "Closing session...\n");
    AVFoundationCaptureContext *ctx = s->priv_data;
    destroy_context(ctx);
    return 0;
}

static const AVClass avfoundation_class = {
    .class_name = "AVFoundation AVCaptureDevice indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation AVCaptureDevice grab"),
    .priv_data_size = sizeof(AVFoundationCaptureContext),
    .read_header    = avfoundation_read_header,
    .read_packet    = avfoundation_read_packet,
    .read_close     = avfoundation_read_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avfoundation_class,
};