FFmpeg
hwcontext_videotoolbox.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include <stdint.h>
22 #include <string.h>
23 
24 #include <VideoToolbox/VideoToolbox.h>
25 
26 #include "buffer.h"
27 #include "buffer_internal.h"
28 #include "common.h"
29 #include "hwcontext.h"
30 #include "hwcontext_internal.h"
31 #include "hwcontext_videotoolbox.h"
32 #include "mem.h"
33 #include "pixfmt.h"
34 #include "pixdesc.h"
35 
36 typedef struct VTFramesContext {
37  CVPixelBufferPoolRef pool;
38 } VTFramesContext;
39 
40 static const struct {
41  uint32_t cv_fmt;
42  bool full_range;
43  enum AVPixelFormat pix_fmt;
44 } cv_pix_fmts[] = {
45  { kCVPixelFormatType_420YpCbCr8Planar, false, AV_PIX_FMT_YUV420P },
46  { kCVPixelFormatType_422YpCbCr8, false, AV_PIX_FMT_UYVY422 },
47  { kCVPixelFormatType_32BGRA, true, AV_PIX_FMT_BGRA },
48 #ifdef kCFCoreFoundationVersionNumber10_7
49  { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, false, AV_PIX_FMT_NV12 },
50  { kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, true, AV_PIX_FMT_NV12 },
51  { kCVPixelFormatType_4444AYpCbCr16, false, AV_PIX_FMT_AYUV64 },
52 #endif
53 #if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
54  { kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P010 },
55  { kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, true, AV_PIX_FMT_P010 },
56 #endif
57 #if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
58  { kCVPixelFormatType_422YpCbCr8BiPlanarVideoRange, false, AV_PIX_FMT_NV16 },
59  { kCVPixelFormatType_422YpCbCr8BiPlanarFullRange, true, AV_PIX_FMT_NV16 },
60 #endif
61 #if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
62  { kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P210 },
63  { kCVPixelFormatType_422YpCbCr10BiPlanarFullRange, true, AV_PIX_FMT_P210 },
64 #endif
65 #if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
66  { kCVPixelFormatType_422YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P216 },
67 #endif
68 #if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
69  { kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange, false, AV_PIX_FMT_NV24 },
70  { kCVPixelFormatType_444YpCbCr8BiPlanarFullRange, true, AV_PIX_FMT_NV24 },
71 #endif
72 #if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
73  { kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P410 },
74  { kCVPixelFormatType_444YpCbCr10BiPlanarFullRange, true, AV_PIX_FMT_P410 },
75 #endif
76 #if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
77  { kCVPixelFormatType_444YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P416 },
78 #endif
79 };
80 
81 static const enum AVPixelFormat supported_formats[] = {
82 #ifdef kCFCoreFoundationVersionNumber10_7
83  AV_PIX_FMT_NV12,
84  AV_PIX_FMT_AYUV64,
85 #endif
86  AV_PIX_FMT_YUV420P,
87  AV_PIX_FMT_UYVY422,
88 #if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
89  AV_PIX_FMT_P010,
90 #endif
91 #if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
92  AV_PIX_FMT_NV16,
93 #endif
94 #if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
95  AV_PIX_FMT_P210,
96 #endif
97 #if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
98  AV_PIX_FMT_P216,
99 #endif
100 #if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
101  AV_PIX_FMT_NV24,
102 #endif
103 #if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
104  AV_PIX_FMT_P410,
105 #endif
106 #if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
107  AV_PIX_FMT_P416,
108 #endif
109  AV_PIX_FMT_BGRA,
110 };
111 
112 static int vt_frames_get_constraints(AVHWDeviceContext *ctx,
113  const void *hwconfig,
114  AVHWFramesConstraints *constraints)
115 {
116  int i;
117 
118  constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_formats) + 1,
119  sizeof(*constraints->valid_sw_formats));
120  if (!constraints->valid_sw_formats)
121  return AVERROR(ENOMEM);
122 
123  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++)
124  constraints->valid_sw_formats[i] = supported_formats[i];
125  constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_formats)] = AV_PIX_FMT_NONE;
126 
127  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
128  if (!constraints->valid_hw_formats)
129  return AVERROR(ENOMEM);
130 
131  constraints->valid_hw_formats[0] = AV_PIX_FMT_VIDEOTOOLBOX;
132  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
133 
134  return 0;
135 }
136 
137 enum AVPixelFormat av_map_videotoolbox_format_to_pixfmt(uint32_t cv_fmt)
138 {
139  int i;
140  for (i = 0; i < FF_ARRAY_ELEMS(cv_pix_fmts); i++) {
141  if (cv_pix_fmts[i].cv_fmt == cv_fmt)
142  return cv_pix_fmts[i].pix_fmt;
143  }
144  return AV_PIX_FMT_NONE;
145 }
146 
147 uint32_t av_map_videotoolbox_format_from_pixfmt(enum AVPixelFormat pix_fmt)
148 {
149  return av_map_videotoolbox_format_from_pixfmt2(pix_fmt, false);
150 }
151 
152 uint32_t av_map_videotoolbox_format_from_pixfmt2(enum AVPixelFormat pix_fmt, bool full_range)
153 {
154  int i;
155  for (i = 0; i < FF_ARRAY_ELEMS(cv_pix_fmts); i++) {
156  if (cv_pix_fmts[i].pix_fmt == pix_fmt && cv_pix_fmts[i].full_range == full_range)
157  return cv_pix_fmts[i].cv_fmt;
158  }
159  return 0;
160 }
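
The mapping helpers above are public API declared in libavutil/hwcontext_videotoolbox.h. As a minimal usage sketch, not part of this file (the helper name dump_mapping is made up):

#include <stdbool.h>
#include <stdio.h>
#include <libavutil/hwcontext_videotoolbox.h>
#include <libavutil/pixdesc.h>

/* Hypothetical helper: print the CoreVideo fourcc FFmpeg would pick for a
 * given AVPixelFormat/range combination, then map it back again. */
static void dump_mapping(enum AVPixelFormat fmt, bool full_range)
{
    uint32_t cv = av_map_videotoolbox_format_from_pixfmt2(fmt, full_range);
    if (!cv) {
        printf("%s: no CoreVideo equivalent\n", av_get_pix_fmt_name(fmt));
        return;
    }
    printf("%s (%s range) -> 0x%08x -> %s\n",
           av_get_pix_fmt_name(fmt), full_range ? "full" : "video",
           (unsigned)cv,
           av_get_pix_fmt_name(av_map_videotoolbox_format_to_pixfmt(cv)));
}

Note that av_map_videotoolbox_format_to_pixfmt() drops the range information, so a full-range fourcc maps back to the same AVPixelFormat as its video-range counterpart.
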
161 
162 static int vt_pool_alloc(AVHWFramesContext *ctx)
163 {
164  VTFramesContext *fctx = ctx->internal->priv;
165  CVReturn err;
166  CFNumberRef w, h, pixfmt;
167  uint32_t cv_pixfmt;
168  CFMutableDictionaryRef attributes, iosurface_properties;
169 
170  attributes = CFDictionaryCreateMutable(
171  NULL,
172  2,
173  &kCFTypeDictionaryKeyCallBacks,
174  &kCFTypeDictionaryValueCallBacks);
175 
176  cv_pixfmt = av_map_videotoolbox_format_from_pixfmt(ctx->sw_format);
177  pixfmt = CFNumberCreate(NULL, kCFNumberSInt32Type, &cv_pixfmt);
178  CFDictionarySetValue(
179  attributes,
180  kCVPixelBufferPixelFormatTypeKey,
181  pixfmt);
182  CFRelease(pixfmt);
183 
184  iosurface_properties = CFDictionaryCreateMutable(
185  NULL,
186  0,
187  &kCFTypeDictionaryKeyCallBacks,
188  &kCFTypeDictionaryValueCallBacks);
189  CFDictionarySetValue(attributes, kCVPixelBufferIOSurfacePropertiesKey, iosurface_properties);
190  CFRelease(iosurface_properties);
191 
192  w = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->width);
193  h = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->height);
194  CFDictionarySetValue(attributes, kCVPixelBufferWidthKey, w);
195  CFDictionarySetValue(attributes, kCVPixelBufferHeightKey, h);
196  CFRelease(w);
197  CFRelease(h);
198 
199  err = CVPixelBufferPoolCreate(
200  NULL,
201  NULL,
202  attributes,
203  &fctx->pool);
204  CFRelease(attributes);
205 
206  if (err == kCVReturnSuccess)
207  return 0;
208 
209  av_log(ctx, AV_LOG_ERROR, "Error creating CVPixelBufferPool: %d\n", err);
210  return AVERROR_EXTERNAL;
211 }
212 
213 static void videotoolbox_buffer_release(void *opaque, uint8_t *data)
214 {
215  CVPixelBufferRelease((CVPixelBufferRef)data);
216 }
217 
218 static AVBufferRef *vt_pool_alloc_buffer(void *opaque, size_t size)
219 {
220  CVPixelBufferRef pixbuf;
221  AVBufferRef *buf;
222  CVReturn err;
223  AVHWFramesContext *ctx = opaque;
224  VTFramesContext *fctx = ctx->internal->priv;
225 
226  err = CVPixelBufferPoolCreatePixelBuffer(
227  NULL,
228  fctx->pool,
229  &pixbuf
230  );
231  if (err != kCVReturnSuccess) {
232  av_log(ctx, AV_LOG_ERROR, "Failed to create pixel buffer from pool: %d\n", err);
233  return NULL;
234  }
235 
236  buf = av_buffer_create((uint8_t *)pixbuf, size,
237  videotoolbox_buffer_release, NULL, 0);
238  if (!buf) {
239  CVPixelBufferRelease(pixbuf);
240  return NULL;
241  }
242  return buf;
243 }
244 
245 static void vt_frames_uninit(AVHWFramesContext *ctx)
246 {
247  VTFramesContext *fctx = ctx->internal->priv;
248  if (fctx->pool) {
249  CVPixelBufferPoolRelease(fctx->pool);
250  fctx->pool = NULL;
251  }
252 }
253 
254 static int vt_frames_init(AVHWFramesContext *ctx)
255 {
256  int i, ret;
257 
258  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
259  if (ctx->sw_format == supported_formats[i])
260  break;
261  }
262  if (i == FF_ARRAY_ELEMS(supported_formats)) {
263  av_log(ctx, AV_LOG_ERROR, "Pixel format '%s' is not supported\n",
264  av_get_pix_fmt_name(ctx->sw_format));
265  return AVERROR(ENOSYS);
266  }
267 
268  if (!ctx->pool) {
269  ctx->internal->pool_internal = av_buffer_pool_init2(
270  sizeof(CVPixelBufferRef), ctx, vt_pool_alloc_buffer, NULL);
271  if (!ctx->internal->pool_internal)
272  return AVERROR(ENOMEM);
273  }
274 
275  ret = vt_pool_alloc(ctx);
276  if (ret < 0)
277  return ret;
278 
279  return 0;
280 }
281 
282 static int vt_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
283 {
284  frame->buf[0] = av_buffer_pool_get(ctx->pool);
285  if (!frame->buf[0])
286  return AVERROR(ENOMEM);
287 
288  frame->data[3] = frame->buf[0]->data;
289  frame->format = AV_PIX_FMT_VIDEOTOOLBOX;
290  frame->width = ctx->width;
291  frame->height = ctx->height;
292 
293  return 0;
294 }
295 
296 static int vt_transfer_get_formats(AVHWFramesContext *ctx,
297  enum AVHWFrameTransferDirection dir,
298  enum AVPixelFormat **formats)
299 {
300  enum AVPixelFormat *fmts = av_malloc_array(2, sizeof(*fmts));
301  if (!fmts)
302  return AVERROR(ENOMEM);
303 
304  fmts[0] = ctx->sw_format;
305  fmts[1] = AV_PIX_FMT_NONE;
306 
307  *formats = fmts;
308  return 0;
309 }
310 
311 static void vt_unmap(AVHWFramesContext *ctx, HWMapDescriptor *hwmap)
312 {
313  CVPixelBufferRef pixbuf = (CVPixelBufferRef)hwmap->source->data[3];
314 
315  CVPixelBufferUnlockBaseAddress(pixbuf, (uintptr_t)hwmap->priv);
316 }
317 
318 static int vt_pixbuf_set_par(void *log_ctx,
319  CVPixelBufferRef pixbuf, const AVFrame *src)
320 {
321  CFMutableDictionaryRef par = NULL;
322  CFNumberRef num = NULL, den = NULL;
323  AVRational avpar = src->sample_aspect_ratio;
324 
325  if (avpar.num == 0)
326  return 0;
327 
328  av_reduce(&avpar.num, &avpar.den,
329  avpar.num, avpar.den,
330  0xFFFFFFFF);
331 
332  num = CFNumberCreate(kCFAllocatorDefault,
333  kCFNumberIntType,
334  &avpar.num);
335 
336  den = CFNumberCreate(kCFAllocatorDefault,
337  kCFNumberIntType,
338  &avpar.den);
339 
340  par = CFDictionaryCreateMutable(kCFAllocatorDefault,
341  2,
342  &kCFCopyStringDictionaryKeyCallBacks,
343  &kCFTypeDictionaryValueCallBacks);
344 
345  if (!par || !num || !den) {
346  if (par) CFRelease(par);
347  if (num) CFRelease(num);
348  if (den) CFRelease(den);
349  return AVERROR(ENOMEM);
350  }
351 
352  CFDictionarySetValue(
353  par,
354  kCVImageBufferPixelAspectRatioHorizontalSpacingKey,
355  num);
356  CFDictionarySetValue(
357  par,
358  kCVImageBufferPixelAspectRatioVerticalSpacingKey,
359  den);
360 
361  CVBufferSetAttachment(
362  pixbuf,
363  kCVImageBufferPixelAspectRatioKey,
364  par,
365  kCVAttachmentMode_ShouldPropagate
366  );
367 
368  CFRelease(par);
369  CFRelease(num);
370  CFRelease(den);
371 
372  return 0;
373 }
374 
375 CFStringRef av_map_videotoolbox_chroma_loc_from_av(enum AVChromaLocation loc)
376 {
377  switch (loc) {
378  case AVCHROMA_LOC_LEFT:
379  return kCVImageBufferChromaLocation_Left;
380  case AVCHROMA_LOC_CENTER:
381  return kCVImageBufferChromaLocation_Center;
382  case AVCHROMA_LOC_TOP:
383  return kCVImageBufferChromaLocation_Top;
384  case AVCHROMA_LOC_BOTTOM:
385  return kCVImageBufferChromaLocation_Bottom;
386  case AVCHROMA_LOC_TOPLEFT:
387  return kCVImageBufferChromaLocation_TopLeft;
388  case AVCHROMA_LOC_BOTTOMLEFT:
389  return kCVImageBufferChromaLocation_BottomLeft;
390  default:
391  return NULL;
392  }
393 }
394 
395 static int vt_pixbuf_set_chromaloc(void *log_ctx,
396  CVPixelBufferRef pixbuf, const AVFrame *src)
397 {
398  CFStringRef loc = av_map_videotoolbox_chroma_loc_from_av(src->chroma_location);
399 
400  if (loc) {
401  CVBufferSetAttachment(
402  pixbuf,
403  kCVImageBufferChromaLocationTopFieldKey,
404  loc,
405  kCVAttachmentMode_ShouldPropagate);
406  }
407 
408  return 0;
409 }
410 
411 CFStringRef av_map_videotoolbox_color_matrix_from_av(enum AVColorSpace space)
412 {
413  switch (space) {
414  case AVCOL_SPC_BT2020_CL:
415  case AVCOL_SPC_BT2020_NCL:
416 #if HAVE_KCVIMAGEBUFFERYCBCRMATRIX_ITU_R_2020
417  if (__builtin_available(macOS 10.11, iOS 9, *))
418  return kCVImageBufferYCbCrMatrix_ITU_R_2020;
419 #endif
420  return CFSTR("ITU_R_2020");
421  case AVCOL_SPC_BT470BG:
422  case AVCOL_SPC_SMPTE170M:
423  return kCVImageBufferYCbCrMatrix_ITU_R_601_4;
424  case AVCOL_SPC_BT709:
425  return kCVImageBufferYCbCrMatrix_ITU_R_709_2;
426  case AVCOL_SPC_SMPTE240M:
427  return kCVImageBufferYCbCrMatrix_SMPTE_240M_1995;
428  default:
429 #if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
430  if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
431  return CVYCbCrMatrixGetStringForIntegerCodePoint(space);
432 #endif
433 
434  return NULL;
435  }
436 }
437 
438 CFStringRef av_map_videotoolbox_color_primaries_from_av(enum AVColorPrimaries pri)
439 {
440  switch (pri) {
441  case AVCOL_PRI_BT2020:
442 #if HAVE_KCVIMAGEBUFFERCOLORPRIMARIES_ITU_R_2020
443  if (__builtin_available(macOS 10.11, iOS 9, *))
444  return kCVImageBufferColorPrimaries_ITU_R_2020;
445 #endif
446  return CFSTR("ITU_R_2020");
447  case AVCOL_PRI_BT709:
448  return kCVImageBufferColorPrimaries_ITU_R_709_2;
449  case AVCOL_PRI_SMPTE170M:
450  return kCVImageBufferColorPrimaries_SMPTE_C;
451  case AVCOL_PRI_BT470BG:
452  return kCVImageBufferColorPrimaries_EBU_3213;
453  default:
454 #if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
455  if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
456  return CVColorPrimariesGetStringForIntegerCodePoint(pri);
457 #endif
458 
459  return NULL;
460  }
461 }
462 
463 CFStringRef av_map_videotoolbox_color_trc_from_av(enum AVColorTransferCharacteristic trc)
464 {
465 
466  switch (trc) {
467  case AVCOL_TRC_SMPTE2084:
468 #if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_2084_PQ
469  if (__builtin_available(macOS 10.13, iOS 11, *))
470  return kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ;
471 #endif
472  return CFSTR("SMPTE_ST_2084_PQ");
473  case AVCOL_TRC_BT2020_10:
474  case AVCOL_TRC_BT2020_12:
475 #if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2020
476  if (__builtin_available(macOS 10.11, iOS 9, *))
477  return kCVImageBufferTransferFunction_ITU_R_2020;
478 #endif
479  return CFSTR("ITU_R_2020");
480  case AVCOL_TRC_BT709:
481  return kCVImageBufferTransferFunction_ITU_R_709_2;
482  case AVCOL_TRC_SMPTE240M:
483  return kCVImageBufferTransferFunction_SMPTE_240M_1995;
484  case AVCOL_TRC_SMPTE428:
485 #if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_428_1
486  if (__builtin_available(macOS 10.12, iOS 10, *))
487  return kCVImageBufferTransferFunction_SMPTE_ST_428_1;
488 #endif
489  return CFSTR("SMPTE_ST_428_1");
490  case AVCOL_TRC_ARIB_STD_B67:
491 #if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
492  if (__builtin_available(macOS 10.13, iOS 11, *))
493  return kCVImageBufferTransferFunction_ITU_R_2100_HLG;
494 #endif
495  return CFSTR("ITU_R_2100_HLG");
496  case AVCOL_TRC_GAMMA22:
497  return kCVImageBufferTransferFunction_UseGamma;
498  case AVCOL_TRC_GAMMA28:
499  return kCVImageBufferTransferFunction_UseGamma;
500  default:
501 #if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
502  if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
503  return CVTransferFunctionGetStringForIntegerCodePoint(trc);
504 #endif
505 
506  return NULL;
507  }
508 }
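
The color mapping helpers above are also public. A sketch of using one of them directly with CVBufferSetAttachment (the helper name tag_matrix is made up); this is essentially what vt_pixbuf_set_colorspace below does for matrix, primaries and transfer function together:

#include <CoreVideo/CoreVideo.h>
#include <libavutil/hwcontext_videotoolbox.h>
#include <libavutil/pixfmt.h>

/* Hypothetical helper: attach the YCbCr matrix string for an AVColorSpace
 * to a pixel buffer; a NULL mapping (unknown matrix) is simply skipped. */
static void tag_matrix(CVPixelBufferRef pixbuf, enum AVColorSpace spc)
{
    CFStringRef matrix = av_map_videotoolbox_color_matrix_from_av(spc);
    if (matrix)
        CVBufferSetAttachment(pixbuf, kCVImageBufferYCbCrMatrixKey, matrix,
                              kCVAttachmentMode_ShouldPropagate);
}
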
509 
510 static int vt_pixbuf_set_colorspace(void *log_ctx,
511  CVPixelBufferRef pixbuf, const AVFrame *src)
512 {
513  CFStringRef colormatrix = NULL, colorpri = NULL, colortrc = NULL;
514  Float32 gamma = 0;
515 
516  colormatrix = av_map_videotoolbox_color_matrix_from_av(src->colorspace);
517  if (!colormatrix && src->colorspace != AVCOL_SPC_UNSPECIFIED)
518  av_log(log_ctx, AV_LOG_WARNING, "Color space %s is not supported.\n", av_color_space_name(src->colorspace));
519 
520  colorpri = av_map_videotoolbox_color_primaries_from_av(src->color_primaries);
521  if (!colorpri && src->color_primaries != AVCOL_PRI_UNSPECIFIED)
522  av_log(log_ctx, AV_LOG_WARNING, "Color primaries %s is not supported.\n", av_color_primaries_name(src->color_primaries));
523 
524  colortrc = av_map_videotoolbox_color_trc_from_av(src->color_trc);
525  if (!colortrc && src->color_trc != AVCOL_TRC_UNSPECIFIED)
526  av_log(log_ctx, AV_LOG_WARNING, "Color transfer function %s is not supported.\n", av_color_transfer_name(src->color_trc));
527 
528  if (src->color_trc == AVCOL_TRC_GAMMA22)
529  gamma = 2.2;
530  else if (src->color_trc == AVCOL_TRC_GAMMA28)
531  gamma = 2.8;
532 
533  if (colormatrix) {
534  CVBufferSetAttachment(
535  pixbuf,
536  kCVImageBufferYCbCrMatrixKey,
537  colormatrix,
538  kCVAttachmentMode_ShouldPropagate);
539  }
540  if (colorpri) {
541  CVBufferSetAttachment(
542  pixbuf,
543  kCVImageBufferColorPrimariesKey,
544  colorpri,
545  kCVAttachmentMode_ShouldPropagate);
546  }
547  if (colortrc) {
548  CVBufferSetAttachment(
549  pixbuf,
550  kCVImageBufferTransferFunctionKey,
551  colortrc,
552  kCVAttachmentMode_ShouldPropagate);
553  }
554  if (gamma != 0) {
555  CFNumberRef gamma_level = CFNumberCreate(NULL, kCFNumberFloat32Type, &gamma);
556  CVBufferSetAttachment(
557  pixbuf,
558  kCVImageBufferGammaLevelKey,
559  gamma_level,
560  kCVAttachmentMode_ShouldPropagate);
561  CFRelease(gamma_level);
562  }
563 
564  return 0;
565 }
566 
567 static int vt_pixbuf_set_attachments(void *log_ctx,
568  CVPixelBufferRef pixbuf, const AVFrame *src)
569 {
570  int ret;
571  ret = vt_pixbuf_set_par(log_ctx, pixbuf, src);
572  if (ret < 0)
573  return ret;
574  ret = vt_pixbuf_set_colorspace(log_ctx, pixbuf, src);
575  if (ret < 0)
576  return ret;
577  ret = vt_pixbuf_set_chromaloc(log_ctx, pixbuf, src);
578  if (ret < 0)
579  return ret;
580  return 0;
581 }
582 
583 int av_vt_pixbuf_set_attachments(void *log_ctx,
584  CVPixelBufferRef pixbuf, const AVFrame *src)
585 {
586  return vt_pixbuf_set_attachments(log_ctx, pixbuf, src);
587 }
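
av_vt_pixbuf_set_attachments() is the public wrapper around the static helpers above. A usage sketch for an AV_PIX_FMT_VIDEOTOOLBOX frame, whose CVPixelBufferRef lives in data[3] (the helper name tag_hw_frame is made up):

#include <VideoToolbox/VideoToolbox.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext_videotoolbox.h>

/* Hypothetical helper: copy aspect ratio, colorimetry and chroma location
 * from the AVFrame fields onto the underlying CVPixelBuffer attachments. */
static int tag_hw_frame(void *log_ctx, AVFrame *hw_frame)
{
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)hw_frame->data[3];
    return av_vt_pixbuf_set_attachments(log_ctx, pixbuf, hw_frame);
}
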
588 
589 static int vt_map_frame(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src,
590  int flags)
591 {
592  CVPixelBufferRef pixbuf = (CVPixelBufferRef)src->data[3];
593  OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
594  CVReturn err;
595  uint32_t map_flags = 0;
596  int ret;
597  int i;
598  enum AVPixelFormat format;
599 
600  format = av_map_videotoolbox_format_to_pixfmt(pixel_format);
601  if (dst->format != format) {
602  av_log(ctx, AV_LOG_ERROR, "Unsupported or mismatching pixel format: %s\n",
603  av_fourcc2str(pixel_format));
604  return AVERROR_UNKNOWN;
605  }
606 
607  if (CVPixelBufferGetWidth(pixbuf) != ctx->width ||
608  CVPixelBufferGetHeight(pixbuf) != ctx->height) {
609  av_log(ctx, AV_LOG_ERROR, "Inconsistent frame dimensions.\n");
610  return AVERROR_UNKNOWN;
611  }
612 
613  if (flags == AV_HWFRAME_MAP_READ)
614  map_flags = kCVPixelBufferLock_ReadOnly;
615 
616  err = CVPixelBufferLockBaseAddress(pixbuf, map_flags);
617  if (err != kCVReturnSuccess) {
618  av_log(ctx, AV_LOG_ERROR, "Error locking the pixel buffer.\n");
619  return AVERROR_UNKNOWN;
620  }
621 
622  if (CVPixelBufferIsPlanar(pixbuf)) {
623  int planes = CVPixelBufferGetPlaneCount(pixbuf);
624  for (i = 0; i < planes; i++) {
625  dst->data[i] = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
626  dst->linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
627  }
628  } else {
629  dst->data[0] = CVPixelBufferGetBaseAddress(pixbuf);
630  dst->linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
631  }
632 
633  ret = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, vt_unmap,
634  (void *)(uintptr_t)map_flags);
635  if (ret < 0)
636  goto unlock;
637 
638  return 0;
639 
640 unlock:
641  CVPixelBufferUnlockBaseAddress(pixbuf, map_flags);
642  return ret;
643 }
644 
645 static int vt_transfer_data_from(AVHWFramesContext *hwfc,
646  AVFrame *dst, const AVFrame *src)
647 {
648  AVFrame *map;
649  int err;
650 
651  if (dst->width > hwfc->width || dst->height > hwfc->height)
652  return AVERROR(EINVAL);
653 
654  map = av_frame_alloc();
655  if (!map)
656  return AVERROR(ENOMEM);
657  map->format = dst->format;
658 
659  err = vt_map_frame(hwfc, map, src, AV_HWFRAME_MAP_READ);
660  if (err)
661  goto fail;
662 
663  map->width = dst->width;
664  map->height = dst->height;
665 
666  err = av_frame_copy(dst, map);
667  if (err)
668  goto fail;
669 
670  err = 0;
671 fail:
672  av_frame_free(&map);
673  return err;
674 }
675 
676 static int vt_transfer_data_to(AVHWFramesContext *hwfc,
677  AVFrame *dst, const AVFrame *src)
678 {
679  AVFrame *map;
680  int err;
681 
682  if (src->width > hwfc->width || src->height > hwfc->height)
683  return AVERROR(EINVAL);
684 
685  map = av_frame_alloc();
686  if (!map)
687  return AVERROR(ENOMEM);
688  map->format = src->format;
689 
690  err = vt_map_frame(hwfc, map, dst, AV_HWFRAME_MAP_WRITE | AV_HWFRAME_MAP_OVERWRITE);
691  if (err)
692  goto fail;
693 
694  map->width = src->width;
695  map->height = src->height;
696 
697  err = av_frame_copy(map, src);
698  if (err)
699  goto fail;
700 
701  err = vt_pixbuf_set_attachments(hwfc, (CVPixelBufferRef)dst->data[3], src);
702  if (err)
703  goto fail;
704 
705  err = 0;
706 fail:
707  av_frame_free(&map);
708  return err;
709 }
710 
711 static int vt_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
712  const AVFrame *src, int flags)
713 {
714  int err;
715 
716  if (dst->format == AV_PIX_FMT_NONE)
717  dst->format = hwfc->sw_format;
718  else if (dst->format != hwfc->sw_format)
719  return AVERROR(ENOSYS);
720 
721  err = vt_map_frame(hwfc, dst, src, flags);
722  if (err)
723  return err;
724 
725  dst->width = src->width;
726  dst->height = src->height;
727 
728  err = av_frame_copy_props(dst, src);
729  if (err)
730  return err;
731 
732  return 0;
733 }
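
vt_map_from above is what backs av_hwframe_map() for this device type. A sketch of zero-copy read access from application code, assuming only the generic libavutil hwcontext API (the helper name read_mapped is made up):

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Hypothetical helper: map a decoded VideoToolbox frame into CPU-visible
 * memory without copying; unreffing the mapped frame unlocks the buffer. */
static int read_mapped(const AVFrame *hw_frame)
{
    AVFrame *sw = av_frame_alloc();
    int ret;

    if (!sw)
        return AVERROR(ENOMEM);
    /* sw->format stays AV_PIX_FMT_NONE, so the mapping uses ctx->sw_format */
    ret = av_hwframe_map(sw, hw_frame, AV_HWFRAME_MAP_READ);
    if (ret < 0) {
        av_frame_free(&sw);
        return ret;
    }
    /* sw->data[]/sw->linesize[] now alias the locked CVPixelBuffer planes;
     * read them here, then release the mapping. */
    av_frame_free(&sw);
    return 0;
}
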
734 
735 static int vt_device_create(AVHWDeviceContext *ctx, const char *device,
736  AVDictionary *opts, int flags)
737 {
738  if (device && device[0]) {
739  av_log(ctx, AV_LOG_ERROR, "Device selection unsupported.\n");
740  return AVERROR_UNKNOWN;
741  }
742 
743  return 0;
744 }
745 
746 const HWContextType ff_hwcontext_type_videotoolbox = {
747  .type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
748  .name = "videotoolbox",
749 
750  .frames_priv_size = sizeof(VTFramesContext),
751 
752  .device_create = vt_device_create,
753  .frames_init = vt_frames_init,
754  .frames_get_buffer = vt_get_buffer,
755  .frames_get_constraints = vt_frames_get_constraints,
756  .frames_uninit = vt_frames_uninit,
757  .transfer_get_formats = vt_transfer_get_formats,
758  .transfer_data_to = vt_transfer_data_to,
759  .transfer_data_from = vt_transfer_data_from,
760  .map_from = vt_map_from,
761 
762  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VIDEOTOOLBOX, AV_PIX_FMT_NONE },
763 };
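
For reference, a sketch of the public calls that drive this backend end to end: create a VideoToolbox device, configure a frames context with one of the supported sw_formats, and pull one pooled CVPixelBuffer-backed frame. Error unwinding is abbreviated and the helper name alloc_vt_frame is made up:

#include <libavutil/buffer.h>
#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixfmt.h>

/* Hypothetical helper: allocate one AV_PIX_FMT_VIDEOTOOLBOX frame backed by
 * a CVPixelBuffer pool with an NV12 layout. */
static int alloc_vt_frame(AVFrame **out, int width, int height)
{
    AVBufferRef *device = NULL, *frames = NULL;
    AVHWFramesContext *fctx;
    AVFrame *frame = NULL;
    int ret;

    ret = av_hwdevice_ctx_create(&device, AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
                                 NULL, NULL, 0);            /* vt_device_create */
    if (ret < 0)
        return ret;

    frames = av_hwframe_ctx_alloc(device);
    if (!frames) {
        ret = AVERROR(ENOMEM);
        goto end;
    }
    fctx            = (AVHWFramesContext *)frames->data;
    fctx->format    = AV_PIX_FMT_VIDEOTOOLBOX;
    fctx->sw_format = AV_PIX_FMT_NV12;   /* must be in supported_formats[] */
    fctx->width     = width;
    fctx->height    = height;

    ret = av_hwframe_ctx_init(frames);                      /* vt_frames_init */
    if (ret < 0)
        goto end;

    frame = av_frame_alloc();
    if (!frame) {
        ret = AVERROR(ENOMEM);
        goto end;
    }
    ret = av_hwframe_get_buffer(frames, frame, 0);          /* vt_get_buffer */
    if (ret < 0)
        av_frame_free(&frame);
    else
        *out = frame;   /* frame->data[3] is the CVPixelBufferRef */

end:
    av_buffer_unref(&frames);
    av_buffer_unref(&device);
    return ret;
}
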