#include <VideoToolbox/VideoToolbox.h>
/* cv_pix_fmts[]: CoreVideo pixel format code, range flag (video/full) and the
 * corresponding AVPixelFormat. */
#ifdef kCFCoreFoundationVersionNumber10_7
    { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, false, AV_PIX_FMT_NV12 },
    { kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,  true,  AV_PIX_FMT_NV12 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P010 },
    { kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P010 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr8BiPlanarVideoRange, false, AV_PIX_FMT_NV16 },
    { kCVPixelFormatType_422YpCbCr8BiPlanarFullRange,  true,  AV_PIX_FMT_NV16 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P210 },
    { kCVPixelFormatType_422YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P210 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P216 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange, false, AV_PIX_FMT_NV24 },
    { kCVPixelFormatType_444YpCbCr8BiPlanarFullRange,  true,  AV_PIX_FMT_NV24 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P410 },
    { kCVPixelFormatType_444YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P410 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P416 },
#endif
/* Availability guards for supported_formats[], the software pixel formats this
 * hwcontext can expose. */
#ifdef kCFCoreFoundationVersionNumber10_7
#if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
static int vt_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig,
                                     AVHWFramesConstraints *constraints)
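On the application side the same information is reported through av_hwdevice_get_hwframe_constraints(); a minimal sketch, with error handling mostly omitted:

AVBufferRef *dev = NULL;
if (av_hwdevice_ctx_create(&dev, AV_HWDEVICE_TYPE_VIDEOTOOLBOX, NULL, NULL, 0) == 0) {
    AVHWFramesConstraints *c = av_hwdevice_get_hwframe_constraints(dev, NULL);
    if (c && c->valid_sw_formats) {
        for (int i = 0; c->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++)
            av_log(NULL, AV_LOG_INFO, "sw format: %s\n",
                   av_get_pix_fmt_name(c->valid_sw_formats[i]));
        av_hwframe_constraints_free(&c);
    }
    av_buffer_unref(&dev);
}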
static int vt_pool_alloc(AVHWFramesContext *ctx)
{
    VTFramesContext *fctx = ctx->internal->priv;
    CVReturn err;
    CFNumberRef w, h, pixfmt;
    uint32_t cv_pixfmt = av_map_videotoolbox_format_from_pixfmt(ctx->sw_format);
    CFMutableDictionaryRef attributes, iosurface_properties;

    attributes = CFDictionaryCreateMutable(
        NULL,
        2,
        &kCFTypeDictionaryKeyCallBacks,
        &kCFTypeDictionaryValueCallBacks);

    /* Request the CoreVideo pixel format matching ctx->sw_format. */
    pixfmt = CFNumberCreate(NULL, kCFNumberSInt32Type, &cv_pixfmt);
    CFDictionarySetValue(
        attributes,
        kCVPixelBufferPixelFormatTypeKey,
        pixfmt);
    CFRelease(pixfmt);

    /* An empty IOSurface properties dictionary asks for IOSurface-backed buffers. */
    iosurface_properties = CFDictionaryCreateMutable(
        NULL,
        0,
        &kCFTypeDictionaryKeyCallBacks,
        &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(attributes, kCVPixelBufferIOSurfacePropertiesKey, iosurface_properties);
    CFRelease(iosurface_properties);

    w = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->width);
    h = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->height);
    CFDictionarySetValue(attributes, kCVPixelBufferWidthKey,  w);
    CFDictionarySetValue(attributes, kCVPixelBufferHeightKey, h);
    CFRelease(w);
    CFRelease(h);

    err = CVPixelBufferPoolCreate(
        NULL,
        NULL,
        attributes,
        &fctx->pool);
    CFRelease(attributes);

    if (err == kCVReturnSuccess)
        return 0;

    return AVERROR_EXTERNAL;
}
    /* vt_frames_uninit() */
    CVPixelBufferPoolRelease(fctx->pool);
    /* vt_frames_init() */
    if (!ctx->internal->pool_internal)
    /* videotoolbox_buffer_release() */
    CVPixelBufferRelease((CVPixelBufferRef)data);
    if (ctx->pool && ctx->pool->size != 0) {

    /* Allocating a CVPixelBuffer from the CVPixelBufferPool. */
    CVPixelBufferRef pixbuf;

    err = CVPixelBufferPoolCreatePixelBuffer(NULL, fctx->pool, &pixbuf);
    if (err != kCVReturnSuccess) {

    CVPixelBufferRelease(pixbuf);
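The pool and allocation callbacks above are normally driven through the generic hwframes API rather than called directly. A hedged sketch of allocating a VideoToolbox-backed frame (the 1920x1080 / NV12 values are arbitrary choices for the example, and error checks are omitted):

AVBufferRef *device_ref = NULL, *frames_ref = NULL;
AVFrame *frame = av_frame_alloc();

av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VIDEOTOOLBOX, NULL, NULL, 0);
frames_ref = av_hwframe_ctx_alloc(device_ref);

AVHWFramesContext *frames = (AVHWFramesContext *)frames_ref->data;
frames->format    = AV_PIX_FMT_VIDEOTOOLBOX; /* hardware format */
frames->sw_format = AV_PIX_FMT_NV12;         /* format of the backing CVPixelBuffers */
frames->width     = 1920;
frames->height    = 1080;

av_hwframe_ctx_init(frames_ref);             /* ends up in vt_frames_init() */
av_hwframe_get_buffer(frames_ref, frame, 0); /* frame->data[3] is a CVPixelBufferRef */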
    fmts[0] = ctx->sw_format;
    /* vt_unmap() */
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)hwmap->source->data[3];

    CVPixelBufferUnlockBaseAddress(pixbuf, (uintptr_t)hwmap->priv);
static int vt_pixbuf_set_par(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
{
    CFMutableDictionaryRef par = NULL;
    CFNumberRef num = NULL, den = NULL;
    AVRational avpar = src->sample_aspect_ratio;

    if (avpar.num == 0)
        return 0;

    av_reduce(&avpar.num, &avpar.den, avpar.num, avpar.den, 0xFFFFFFFF);

    num = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &avpar.num);
    den = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &avpar.den);
    par = CFDictionaryCreateMutable(kCFAllocatorDefault, 2,
                                    &kCFCopyStringDictionaryKeyCallBacks,
                                    &kCFTypeDictionaryValueCallBacks);

    if (!par || !num || !den) {
        if (par) CFRelease(par);
        if (num) CFRelease(num);
        if (den) CFRelease(den);
        return AVERROR(ENOMEM);
    }

    CFDictionarySetValue(par, kCVImageBufferPixelAspectRatioHorizontalSpacingKey, num);
    CFDictionarySetValue(par, kCVImageBufferPixelAspectRatioVerticalSpacingKey,   den);

    CVBufferSetAttachment(pixbuf, kCVImageBufferPixelAspectRatioKey, par,
                          kCVAttachmentMode_ShouldPropagate);

    CFRelease(par);
    CFRelease(num);
    CFRelease(den);

    return 0;
}
/* av_map_videotoolbox_chroma_loc_from_av() */
switch (loc) {
case AVCHROMA_LOC_LEFT:       return kCVImageBufferChromaLocation_Left;
case AVCHROMA_LOC_CENTER:     return kCVImageBufferChromaLocation_Center;
case AVCHROMA_LOC_TOP:        return kCVImageBufferChromaLocation_Top;
case AVCHROMA_LOC_BOTTOM:     return kCVImageBufferChromaLocation_Bottom;
case AVCHROMA_LOC_TOPLEFT:    return kCVImageBufferChromaLocation_TopLeft;
case AVCHROMA_LOC_BOTTOMLEFT: return kCVImageBufferChromaLocation_BottomLeft;
default:                      return NULL;
}

/* vt_pixbuf_set_chromaloc(): attach the mapped location, if any. */
CFStringRef loc = av_map_videotoolbox_chroma_loc_from_av(src->chroma_location);
if (loc)
    CVBufferSetAttachment(pixbuf, kCVImageBufferChromaLocationTopFieldKey, loc,
                          kCVAttachmentMode_ShouldPropagate);
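Attachments written this way can be read back from a frame's pixel buffer with the CoreVideo getter; a small sketch, assuming frame holds AV_PIX_FMT_VIDEOTOOLBOX data:

CVPixelBufferRef pb = (CVPixelBufferRef)frame->data[3];
CFTypeRef chroma = CVBufferGetAttachment(pb, kCVImageBufferChromaLocationTopFieldKey, NULL);
if (chroma)
    CFShow(chroma); /* prints the attached chroma location string, e.g. "Left" */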
CFStringRef av_map_videotoolbox_color_matrix_from_av(enum AVColorSpace space)
{
    switch (space) {
    case AVCOL_SPC_BT2020_CL:
    case AVCOL_SPC_BT2020_NCL:
#if HAVE_KCVIMAGEBUFFERYCBCRMATRIX_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferYCbCrMatrix_ITU_R_2020;
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_SPC_BT470BG:
    case AVCOL_SPC_SMPTE170M:
        return kCVImageBufferYCbCrMatrix_ITU_R_601_4;
    case AVCOL_SPC_BT709:
        return kCVImageBufferYCbCrMatrix_ITU_R_709_2;
    case AVCOL_SPC_SMPTE240M:
        return kCVImageBufferYCbCrMatrix_SMPTE_240M_1995;
    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVYCbCrMatrixGetStringForIntegerCodePoint(space);
#endif
        return NULL;
    }
}

CFStringRef av_map_videotoolbox_color_primaries_from_av(enum AVColorPrimaries pri)
{
    switch (pri) {
    case AVCOL_PRI_BT2020:
#if HAVE_KCVIMAGEBUFFERCOLORPRIMARIES_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferColorPrimaries_ITU_R_2020;
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_PRI_BT709:
        return kCVImageBufferColorPrimaries_ITU_R_709_2;
    case AVCOL_PRI_SMPTE170M:
        return kCVImageBufferColorPrimaries_SMPTE_C;
    case AVCOL_PRI_BT470BG:
        return kCVImageBufferColorPrimaries_EBU_3213;
    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVColorPrimariesGetStringForIntegerCodePoint(pri);
#endif
        return NULL;
    }
}

CFStringRef av_map_videotoolbox_color_trc_from_av(enum AVColorTransferCharacteristic trc)
{
    switch (trc) {
    case AVCOL_TRC_SMPTE2084:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_2084_PQ
        if (__builtin_available(macOS 10.13, iOS 11, *))
            return kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ;
#endif
        return CFSTR("SMPTE_ST_2084_PQ");
    case AVCOL_TRC_BT2020_10:
    case AVCOL_TRC_BT2020_12:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferTransferFunction_ITU_R_2020;
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_TRC_BT709:
        return kCVImageBufferTransferFunction_ITU_R_709_2;
    case AVCOL_TRC_SMPTE240M:
        return kCVImageBufferTransferFunction_SMPTE_240M_1995;
    case AVCOL_TRC_SMPTE428:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_428_1
        if (__builtin_available(macOS 10.12, iOS 10, *))
            return kCVImageBufferTransferFunction_SMPTE_ST_428_1;
#endif
        return CFSTR("SMPTE_ST_428_1");
    case AVCOL_TRC_ARIB_STD_B67:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, *))
            return kCVImageBufferTransferFunction_ITU_R_2100_HLG;
#endif
        return CFSTR("ITU_R_2100_HLG");
    case AVCOL_TRC_GAMMA22:
        return kCVImageBufferTransferFunction_UseGamma;
    case AVCOL_TRC_GAMMA28:
        return kCVImageBufferTransferFunction_UseGamma;
    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVTransferFunctionGetStringForIntegerCodePoint(trc);
#endif
        return NULL;
    }
}
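A hypothetical helper showing how the three mapping functions can be combined; report_color_tags() and its use of CFShow() are illustration only, not part of this file:

static void report_color_tags(const AVFrame *frame)
{
    CFStringRef mat = av_map_videotoolbox_color_matrix_from_av(frame->colorspace);
    CFStringRef pri = av_map_videotoolbox_color_primaries_from_av(frame->color_primaries);
    CFStringRef trc = av_map_videotoolbox_color_trc_from_av(frame->color_trc);

    if (mat) CFShow(mat); /* CoreVideo string the buffer would be tagged with */
    if (pri) CFShow(pri);
    if (trc) CFShow(trc);
}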
static int vt_pixbuf_set_colorspace(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
{
    CFStringRef colormatrix = NULL, colorpri = NULL, colortrc = NULL;
    Float32 gamma = 0;

    colormatrix = av_map_videotoolbox_color_matrix_from_av(src->colorspace);
    colorpri    = av_map_videotoolbox_color_primaries_from_av(src->color_primaries);
    colortrc    = av_map_videotoolbox_color_trc_from_av(src->color_trc);

    if (src->color_trc == AVCOL_TRC_GAMMA22)
        gamma = 2.2;
    else if (src->color_trc == AVCOL_TRC_GAMMA28)
        gamma = 2.8;

    if (colormatrix)
        CVBufferSetAttachment(pixbuf, kCVImageBufferYCbCrMatrixKey,
                              colormatrix, kCVAttachmentMode_ShouldPropagate);
    if (colorpri)
        CVBufferSetAttachment(pixbuf, kCVImageBufferColorPrimariesKey,
                              colorpri, kCVAttachmentMode_ShouldPropagate);
    if (colortrc)
        CVBufferSetAttachment(pixbuf, kCVImageBufferTransferFunctionKey,
                              colortrc, kCVAttachmentMode_ShouldPropagate);

    if (gamma != 0) {
        CFNumberRef gamma_level = CFNumberCreate(NULL, kCFNumberFloat32Type, &gamma);
        CVBufferSetAttachment(pixbuf, kCVImageBufferGammaLevelKey,
                              gamma_level, kCVAttachmentMode_ShouldPropagate);
        CFRelease(gamma_level);
    }

    return 0;
}
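These helpers sit behind the public av_vt_pixbuf_set_attachments() entry point; a hedged sketch of tagging a pixel buffer obtained elsewhere (pixbuf and frame are assumed to exist already):

int ret = av_vt_pixbuf_set_attachments(NULL, pixbuf, frame);
if (ret < 0)
    av_log(NULL, AV_LOG_ERROR, "Could not attach CoreVideo color/PAR metadata\n");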
static int vt_map_frame(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
{
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)src->data[3];
    OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
    CVReturn err;
    uint32_t map_flags = 0;
    int ret, i;

    enum AVPixelFormat format = av_map_videotoolbox_format_to_pixfmt(pixel_format);
    if (dst->format != format) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported or mismatching pixel format: %s\n",
               av_fourcc2str(pixel_format));
        return AVERROR(EINVAL);
    }

    if (CVPixelBufferGetWidth(pixbuf)  != ctx->width ||
        CVPixelBufferGetHeight(pixbuf) != ctx->height) {
        return AVERROR(EINVAL);
    }

    if (flags == AV_HWFRAME_MAP_READ)
        map_flags = kCVPixelBufferLock_ReadOnly;

    err = CVPixelBufferLockBaseAddress(pixbuf, map_flags);
    if (err != kCVReturnSuccess) {
        return AVERROR_UNKNOWN;
    }

    if (CVPixelBufferIsPlanar(pixbuf)) {
        int planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (i = 0; i < planes; i++) {
            dst->data[i]     = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            dst->linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
        }
    } else {
        dst->data[0]     = CVPixelBufferGetBaseAddress(pixbuf);
        dst->linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
    }

    /* Register vt_unmap() so the lock is released when the mapping is destroyed. */
    ret = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, vt_unmap,
                                (void *)(uintptr_t)map_flags);
    if (ret < 0) {
        CVPixelBufferUnlockBaseAddress(pixbuf, map_flags);
        return ret;
    }

    return 0;
}
    /* vt_transfer_data_to() / vt_transfer_data_from(): copy through a temporary mapped frame. */
    map->format = src->format;
    map->height = src->height;
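From the caller's perspective this transfer path is reached through av_hwframe_transfer_data(); a short sketch, where hw is assumed to be a decoded AV_PIX_FMT_VIDEOTOOLBOX frame:

AVFrame *sw = av_frame_alloc();
if (sw && av_hwframe_transfer_data(sw, hw, 0) == 0) {
    /* sw->data[] / sw->linesize[] now describe the downloaded planes
     * in the frames context's sw_format. */
}
av_frame_free(&sw);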
    /* vt_device_create(): no device selection string is supported. */
    if (device && device[0]) {
const HWContextType ff_hwcontext_type_videotoolbox = {
    .type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
    .name = "videotoolbox",
#define AV_LOG_WARNING
Something somehow does not look correct.
AVPixelFormat
Pixel format.
CFStringRef av_map_videotoolbox_color_trc_from_av(enum AVColorTransferCharacteristic trc)
Convert an AVColorTransferCharacteristic to a VideoToolbox/CoreVideo color transfer function string.
Undefined Behavior: in the C language, some operations are undefined, like signed integer overflow, dereferencing freed pointers, or accessing outside allocated space.
AVColorTransferCharacteristic
Color Transfer Characteristic.
AVFrame * source
A reference to the original source of the mapping.
CFStringRef av_map_videotoolbox_color_matrix_from_av(enum AVColorSpace space)
Convert an AVColorSpace to a VideoToolbox/CoreVideo color matrix string.
@ AV_HWFRAME_MAP_WRITE
The mapping must be writeable.
static int vt_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src)
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
This structure describes decoded (raw) audio or video data.
enum AVPixelFormat pix_fmt
static const struct @308 cv_pix_fmts[]
int av_vt_pixbuf_set_attachments(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
static AVBufferRef * vt_dummy_pool_alloc(void *opaque, size_t size)
@ AVCOL_TRC_BT2020_12
ITU-R BT2020 for 12-bit system.
static int vt_frames_init(AVHWFramesContext *ctx)
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
@ AV_HWDEVICE_TYPE_VIDEOTOOLBOX
AVColorPrimaries
Chromaticity coordinates of the source primaries.
static int vt_map_frame(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
void * priv
Hardware-specific private data associated with the mapping.
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
int width
The allocated dimensions of the frames in this pool.
CVPixelBufferPoolRef pool
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
const HWContextType ff_hwcontext_type_videotoolbox
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
@ AV_HWFRAME_MAP_READ
The mapping must be readable.
static int vt_pixbuf_set_attachments(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
const char * av_color_space_name(enum AVColorSpace space)
@ AVCOL_TRC_GAMMA28
also ITU-R BT470BG
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
static int vt_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
static void vt_frames_uninit(AVHWFramesContext *ctx)
@ AVCOL_TRC_GAMMA22
also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
CFStringRef av_map_videotoolbox_color_primaries_from_av(enum AVColorPrimaries pri)
Convert an AVColorPrimaries to a VideoToolbox/CoreVideo color primaries string.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
static int vt_pixbuf_set_chromaloc(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
static enum AVPixelFormat supported_formats[]
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
@ AV_HWFRAME_MAP_OVERWRITE
The mapped frame will be overwritten completely in subsequent operations, so the current frame data n...
static int vt_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
static enum AVPixelFormat pix_fmts[]
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
@ AVCOL_PRI_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
static int vt_pixbuf_set_colorspace(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
static const struct @321 planes[]
@ AVCOL_PRI_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
uint32_t av_map_videotoolbox_format_from_pixfmt(enum AVPixelFormat pix_fmt)
Convert an AVPixelFormat to a VideoToolbox (actually CoreVideo) format.
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Rational number (pair of numerator and denominator).
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
const char * av_color_primaries_name(enum AVColorPrimaries primaries)
@ AVCOL_TRC_BT2020_10
ITU-R BT2020 for 10-bit system.
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
@ AVCOL_PRI_BT2020
ITU-R BT2020.
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
static int vt_pixbuf_set_par(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
#define AV_PIX_FMT_AYUV64
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
@ AV_PIX_FMT_NV16
interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
static int vt_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
#define AVERROR_EXTERNAL
Generic error in an external library.
static void vt_unmap(AVHWFramesContext *ctx, HWMapDescriptor *hwmap)
static int vt_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src)
@ AVCOL_TRC_BT709
also ITU-R BT1361
AVChromaLocation
Location of chroma samples.
@ AVCOL_SPC_SMPTE240M
derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
#define i(width, name, range_min, range_max)
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
#define av_malloc_array(a, b)
AVColorSpace
YUV colorspace type.
@ AV_PIX_FMT_NV24
planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
AVHWFrameTransferDirection
This struct describes a set or pool of "hardware" frames (i.e.
static int vt_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
static void videotoolbox_buffer_release(void *opaque, uint8_t *data)
enum AVPixelFormat pixfmt
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
enum AVPixelFormat av_map_videotoolbox_format_to_pixfmt(uint32_t cv_fmt)
Convert a VideoToolbox (actually CoreVideo) format to AVPixelFormat.
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
CFStringRef av_map_videotoolbox_chroma_loc_from_av(enum AVChromaLocation loc)
Convert an AVChromaLocation to a VideoToolbox/CoreVideo chroma location string.
A reference to a data buffer.
const VDPAUPixFmtMap * map
uint32_t av_map_videotoolbox_format_from_pixfmt2(enum AVPixelFormat pix_fmt, bool full_range)
Same as av_map_videotoolbox_format_from_pixfmt function, but can map and return full range pixel form...
#define flags(name, subs,...)
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
@ AVCOL_TRC_SMPTE428
SMPTE ST 428-1.
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
@ AVCHROMA_LOC_BOTTOMLEFT
static int vt_pool_alloc(AVHWFramesContext *ctx)
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
#define av_fourcc2str(fourcc)