#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0602
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0602
#endif

HRESULT ff_MFGetAttributeSize(IMFAttributes *pattr, REFGUID guid,
                              UINT32 *pw, UINT32 *ph)
{
    UINT64 t;
    HRESULT hr = IMFAttributes_GetUINT64(pattr, guid, &t);
    if (!FAILED(hr)) {
        /* high 32 bits hold the width, low 32 bits the height */
        *pw = t >> 32;
        *ph = (UINT32)t;
    }
    return hr;
}

HRESULT ff_MFSetAttributeSize(IMFAttributes *pattr, REFGUID guid,
                              UINT32 uw, UINT32 uh)
{
    UINT64 t = (((UINT64)uw) << 32) | uh;
    return IMFAttributes_SetUINT64(pattr, guid, t);
}
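/*
 * Illustrative sketch, not part of mf_utils.c: typical use of the packed
 * size helpers above on an IMFMediaType. The function name and the "log",
 * "type", "width" and "height" parameters are hypothetical.
 */
static void example_set_and_query_frame_size(void *log, IMFMediaType *type,
                                             int width, int height)
{
    UINT32 w = 0, h = 0;
    /* MF_MT_FRAME_SIZE packs the width into the high and the height into the
     * low 32 bits of a single UINT64 attribute. */
    ff_MFSetAttributeSize((IMFAttributes *)type, &MF_MT_FRAME_SIZE, width, height);
    if (!FAILED(ff_MFGetAttributeSize((IMFAttributes *)type, &MF_MT_FRAME_SIZE,
                                      &w, &h)))
        av_log(log, AV_LOG_VERBOSE, "frame size %ux%u\n",
               (unsigned)w, (unsigned)h);
}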
#define ff_MFSetAttributeRatio ff_MFSetAttributeSize
#define ff_MFGetAttributeRatio ff_MFGetAttributeSize

HRESULT ff_MFTEnumEx(GUID guidCategory, UINT32 Flags,
                     const MFT_REGISTER_TYPE_INFO *pInputType,
                     const MFT_REGISTER_TYPE_INFO *pOutputType,
                     IMFActivate ***pppMFTActivate, UINT32 *pnumMFTActivate)
{
    HRESULT (WINAPI *MFTEnumEx_ptr)(GUID guidCategory, UINT32 Flags,
                                    const MFT_REGISTER_TYPE_INFO *pInputType,
                                    const MFT_REGISTER_TYPE_INFO *pOutputType,
                                    IMFActivate ***pppMFTActivate,
                                    UINT32 *pnumMFTActivate) = NULL;
#if !HAVE_UWP
    /* Look up MFTEnumEx() at runtime instead of linking against it directly. */
    HANDLE lib = GetModuleHandleW(L"mfplat.dll");
    if (lib)
        MFTEnumEx_ptr = (void *)GetProcAddress(lib, "MFTEnumEx");
#else
    /* UWP builds (no GetModuleHandle) link against the function directly. */
    MFTEnumEx_ptr = MFTEnumEx;
#endif
    if (!MFTEnumEx_ptr)
        return E_FAIL;
    return MFTEnumEx_ptr(guidCategory, Flags, pInputType, pOutputType,
                         pppMFTActivate, pnumMFTActivate);
}
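/*
 * Illustrative sketch, not part of mf_utils.c: enumerating H.264 decoder MFTs
 * through the wrapper above. All local names are hypothetical; real callers
 * go through ff_instantiate_mf() further down.
 */
static void example_enum_h264_decoders(void)
{
    MFT_REGISTER_TYPE_INFO in = { MFMediaType_Video, MFVideoFormat_H264 };
    IMFActivate **activate = NULL;
    UINT32 num_activate = 0, n;

    if (FAILED(ff_MFTEnumEx(MFT_CATEGORY_VIDEO_DECODER, MFT_ENUM_FLAG_SORTANDFILTER,
                            &in, NULL, &activate, &num_activate)))
        return;

    /* the caller owns both the array and each activation object */
    for (n = 0; n < num_activate; n++)
        IMFActivate_Release(activate[n]);
    CoTaskMemFree(activate);
}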
char *ff_hr_str_buf(char *buf, size_t size, HRESULT hr)
{
#define HR(x) case x: return (char *) # x;
    switch (hr) {
    HR(MF_E_INVALIDMEDIATYPE)
    HR(MF_E_INVALIDSTREAMNUMBER)
    HR(MF_E_TRANSFORM_CANNOT_CHANGE_MEDIATYPE_WHILE_PROCESSING)
    HR(MF_E_TRANSFORM_TYPE_NOT_SET)
    HR(MF_E_UNSUPPORTED_D3D_TYPE)
    HR(MF_E_TRANSFORM_NEED_MORE_INPUT)
    HR(MF_E_TRANSFORM_STREAM_CHANGE)
    HR(MF_E_NO_SAMPLE_TIMESTAMP)
    HR(MF_E_NO_SAMPLE_DURATION)
    /* (further error codes elided) */
    }
#undef HR
    /* unknown code: fall back to printing the raw HRESULT value */
    snprintf(buf, size, "%x", (unsigned)hr);
    return buf;
}
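/*
 * Illustrative sketch, not part of mf_utils.c: turning an HRESULT into a
 * printable token for av_log(). The function name, "log" and the log message
 * are hypothetical.
 */
static void example_log_hresult(void *log, HRESULT hr)
{
    char buf[80];
    av_log(log, AV_LOG_ERROR, "ProcessOutput failed: %s\n",
           ff_hr_str_buf(buf, sizeof(buf), hr));
}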
IMFSample *ff_create_memory_sample(void *fill_data, size_t size, size_t align)
{
    HRESULT hr;
    IMFSample *sample;
    IMFMediaBuffer *buffer;

    hr = MFCreateSample(&sample);
    if (FAILED(hr))
        return NULL;

    align = FFMAX(align, 16);

    hr = MFCreateAlignedMemoryBuffer(size, align - 1, &buffer);
    if (FAILED(hr))
        return NULL;

    if (fill_data) {
        BYTE *tmp;
        hr = IMFMediaBuffer_Lock(buffer, &tmp, NULL, NULL);
        if (FAILED(hr)) {
            IMFMediaBuffer_Release(buffer);
            IMFSample_Release(sample);
            return NULL;
        }
        memcpy(tmp, fill_data, size);
        IMFMediaBuffer_SetCurrentLength(buffer, size);
        IMFMediaBuffer_Unlock(buffer);
    }

    IMFSample_AddBuffer(sample, buffer);
    IMFMediaBuffer_Release(buffer);

    return sample;
}
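/*
 * Illustrative sketch, not part of mf_utils.c: wrapping an AVPacket's payload
 * in an IMFSample before IMFTransform::ProcessInput. The function name and
 * the timestamp scaling are hypothetical; MF sample times are in
 * 100-nanosecond units.
 */
static IMFSample *example_sample_from_packet(const AVPacket *pkt)
{
    IMFSample *sample = ff_create_memory_sample(pkt->data, pkt->size, 0);
    if (sample && pkt->pts != AV_NOPTS_VALUE)
        IMFSample_SetSampleTime(sample, pkt->pts * 10000 /* assumes ms pts */);
    return sample;
}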
enum AVSampleFormat ff_media_type_to_sample_fmt(IMFAttributes *type)
{
    HRESULT hr;
    UINT32 bits;
    GUID subtype;

    hr = IMFAttributes_GetUINT32(type, &MF_MT_AUDIO_BITS_PER_SAMPLE, &bits);
    if (FAILED(hr))
        return AV_SAMPLE_FMT_NONE;

    hr = IMFAttributes_GetGUID(type, &MF_MT_SUBTYPE, &subtype);
    if (FAILED(hr))
        return AV_SAMPLE_FMT_NONE;

    if (IsEqualGUID(&subtype, &MFAudioFormat_PCM)) {
        /* ... map bits (8/16/32) to AV_SAMPLE_FMT_U8/S16/S32 ... */
    } else if (IsEqualGUID(&subtype, &MFAudioFormat_Float)) {
        /* ... map bits (32/64) to AV_SAMPLE_FMT_FLT/DBL ... */
    }

    return AV_SAMPLE_FMT_NONE;
}
struct mf_pix_fmt_entry {
    const GUID *guid;
    enum AVPixelFormat pix_fmt;
};

static const struct mf_pix_fmt_entry mf_pix_fmts[] = {
    {&MFVideoFormat_I420, AV_PIX_FMT_YUV420P},
    /* (further entries elided) */
    {&MFVideoFormat_P016, AV_PIX_FMT_P010},
};

enum AVPixelFormat ff_media_type_to_pix_fmt(IMFAttributes *type)
{
    HRESULT hr;
    GUID subtype;
    int i;

    hr = IMFAttributes_GetGUID(type, &MF_MT_SUBTYPE, &subtype);
    if (FAILED(hr))
        return AV_PIX_FMT_NONE;

    for (i = 0; i < FF_ARRAY_ELEMS(mf_pix_fmts); i++) {
        if (IsEqualGUID(&subtype, mf_pix_fmts[i].guid))
            return mf_pix_fmts[i].pix_fmt;
    }

    return AV_PIX_FMT_NONE;
}

const GUID *ff_pix_fmt_to_guid(enum AVPixelFormat pix_fmt)
{
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(mf_pix_fmts); i++) {
        if (mf_pix_fmts[i].pix_fmt == pix_fmt)
            return mf_pix_fmts[i].guid;
    }

    return NULL;
}
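/*
 * Illustrative sketch, not part of mf_utils.c: checking that a negotiated
 * output type uses a pixel format the table above knows in both directions.
 * The function name and "type" are hypothetical.
 */
static int example_check_pix_fmt_roundtrip(IMFMediaType *type)
{
    enum AVPixelFormat pix_fmt = ff_media_type_to_pix_fmt((IMFAttributes *)type);
    /* ff_pix_fmt_to_guid() yields the MF subtype GUID to request for a given
     * AVPixelFormat, or NULL when the table has no mapping. */
    return pix_fmt != AV_PIX_FMT_NONE && ff_pix_fmt_to_guid(pix_fmt) != NULL;
}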
int ff_fourcc_from_guid(const GUID *guid, uint32_t *out_fourcc)
{
    /* Subtype GUIDs following the base layout carry their FOURCC in Data1,
     * with all remaining fields fixed. */
    if (guid->Data2 == 0 && guid->Data3 == 0x0010 &&
        guid->Data4[0] == 0x80 &&
        guid->Data4[1] == 0x00 &&
        guid->Data4[2] == 0x00 &&
        guid->Data4[3] == 0xAA &&
        guid->Data4[4] == 0x00 &&
        guid->Data4[5] == 0x38 &&
        guid->Data4[6] == 0x9B &&
        guid->Data4[7] == 0x71) {
        *out_fourcc = guid->Data1;
        return 0;
    }

    *out_fourcc = 0;
    return AVERROR_UNKNOWN;
}
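/*
 * Illustrative sketch, not part of mf_utils.c: video subtype GUIDs of the
 * base form XXXXXXXX-0000-0010-8000-00AA00389B71 (e.g. MFVideoFormat_NV12)
 * carry their FOURCC in Data1. The function name and "log" are hypothetical.
 */
static void example_print_subtype_fourcc(void *log, const GUID *subtype)
{
    uint32_t fourcc;
    if (ff_fourcc_from_guid(subtype, &fourcc) >= 0)
        av_log(log, AV_LOG_VERBOSE, "subtype fourcc: %s\n", av_fourcc2str(fourcc));
}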
struct GUID_Entry {
    const GUID *guid;
    const char *name;
};

#define GUID_ENTRY(var) {&(var), # var}

static struct GUID_Entry guid_names[] = {
    GUID_ENTRY(ff_MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT),
    GUID_ENTRY(ff_MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT_PROGRESSIVE),
    GUID_ENTRY(ff_MF_SA_D3D11_SHARED_WITHOUT_MUTEX),
    /* (further MF_SA_ / MF_MT_ entries elided) */
    GUID_ENTRY(MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION),
    GUID_ENTRY(MF_MT_AUDIO_FLOAT_SAMPLES_PER_SECOND),
    GUID_ENTRY(MF_MT_AUDIO_VALID_BITS_PER_SAMPLE),
    /* (further MF_MT_ entries elided) */
    GUID_ENTRY(ff_CODECAPI_AVDecVideoThumbnailGenerationMode),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoDropPicWithMissingRef),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoSoftwareDeinterlaceMode),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoFastDecodeMode),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoH264ErrorConcealment),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoMPEG2ErrorConcealment),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoDXVABusEncryption),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoSWPowerLevel),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoMaxCodedWidth),
    GUID_ENTRY(ff_CODECAPI_AVDecVideoMaxCodedHeight),
    GUID_ENTRY(ff_CODECAPI_AVDecNumWorkerThreads),
    GUID_ENTRY(ff_CODECAPI_AVDecSoftwareDynamicFormatChange),
    GUID_ENTRY(ff_CODECAPI_AVDecDisableVideoPostProcessing),
    /* (further entries elided) */
};
char *ff_guid_str_buf(char *buf, size_t buf_size, const GUID *guid)
{
    int n;

    /* prefer a known symbolic name from guid_names[] */
    for (n = 0; n < FF_ARRAY_ELEMS(guid_names); n++) {
        if (IsEqualGUID(guid, guid_names[n].guid)) {
            snprintf(buf, buf_size, "%s", guid_names[n].name);
            return buf;
        }
    }

    /* otherwise print the GUID in its canonical textual form */
    snprintf(buf, buf_size,
             "{%8.8x-%4.4x-%4.4x-%2.2x%2.2x-%2.2x%2.2x%2.2x%2.2x%2.2x%2.2x}",
             (unsigned) guid->Data1, guid->Data2, guid->Data3,
             guid->Data4[0], guid->Data4[1],
             guid->Data4[2], guid->Data4[3],
             guid->Data4[4], guid->Data4[5],
             guid->Data4[6], guid->Data4[7]);
    return buf;
}
void ff_attributes_dump(void *log, IMFAttributes *attrs)
{
    HRESULT hr;
    UINT32 count;
    int n;

    hr = IMFAttributes_GetCount(attrs, &count);
    if (FAILED(hr))
        return;

    for (n = 0; n < count; n++) {
        GUID key;
        MF_ATTRIBUTE_TYPE type;
        char extra[80] = {0};

        hr = IMFAttributes_GetItemByIndex(attrs, n, &key, NULL);
        if (FAILED(hr))
            break;

        /* Pre-format a decoded suffix for a few well-known keys. */
        if (IsEqualGUID(&key, &MF_MT_AUDIO_CHANNEL_MASK)) {
            UINT32 v;
            hr = IMFAttributes_GetUINT32(attrs, &key, &v);
            if (!FAILED(hr))
                snprintf(extra, sizeof(extra), " (0x%x)", (unsigned)v);
        } else if (IsEqualGUID(&key, &MF_MT_FRAME_SIZE)) {
            UINT32 w, h;
            hr = ff_MFGetAttributeSize(attrs, &MF_MT_FRAME_SIZE, &w, &h);
            if (!FAILED(hr))
                snprintf(extra, sizeof(extra), " (%dx%d)", (int)w, (int)h);
        } else if (IsEqualGUID(&key, &MF_MT_PIXEL_ASPECT_RATIO) ||
                   IsEqualGUID(&key, &MF_MT_FRAME_RATE)) {
            UINT32 num, den;
            hr = ff_MFGetAttributeRatio(attrs, &key, &num, &den);
            if (!FAILED(hr))
                snprintf(extra, sizeof(extra), " (%d:%d)", (int)num, (int)den);
        }

        hr = IMFAttributes_GetItemType(attrs, &key, &type);
        if (FAILED(hr))
            continue;

        switch (type) {
        case MF_ATTRIBUTE_UINT32: {
            UINT32 v;
            hr = IMFAttributes_GetUINT32(attrs, &key, &v);
            break;
        }
        case MF_ATTRIBUTE_UINT64: {
            UINT64 v;
            hr = IMFAttributes_GetUINT64(attrs, &key, &v);
            break;
        }
        case MF_ATTRIBUTE_DOUBLE: {
            double v;
            hr = IMFAttributes_GetDouble(attrs, &key, &v);
            break;
        }
        case MF_ATTRIBUTE_STRING: {
            wchar_t s[512];
            hr = IMFAttributes_GetString(attrs, &key, s, sizeof(s), NULL);
            break;
        }
        case MF_ATTRIBUTE_GUID: {
            GUID v;
            hr = IMFAttributes_GetGUID(attrs, &key, &v);
            break;
        }
        case MF_ATTRIBUTE_BLOB: {
            /* buffer[] and str[] are fixed-size locals (declarations elided) */
            UINT32 sz, pos;
            hr = IMFAttributes_GetBlobSize(attrs, &key, &sz);
            if (!FAILED(hr) && sz <= sizeof(buffer)) {
                hr = IMFAttributes_GetBlob(attrs, &key, buffer, sizeof(buffer), &sz);
                /* hex-dump the blob into str[] */
                for (pos = 0; pos < sz; pos++) {
                    const char *hex = "0123456789ABCDEF";
                    if (pos * 3 + 3 > sizeof(str))
                        break;
                    str[pos * 3 + 0] = hex[buffer[pos] >> 4];
                    str[pos * 3 + 1] = hex[buffer[pos] & 15];
                    str[pos * 3 + 2] = ' ';
                }
                str[pos * 3 + 0] = 0;
            }
            break;
        }
        case MF_ATTRIBUTE_IUNKNOWN: {
            break;
        }
        }

        /* ... each key is av_log()'d via ff_guid_str(&key), together with its
           type, the decoded value and the "extra" suffix ... */

        if (IsEqualGUID(&key, &MF_MT_SUBTYPE)) {
            /* ... for MF_MT_SUBTYPE, the mapped pixel/sample format name is
               printed as well ... */
        }
    }
}
const CLSID *ff_codec_to_mf_subtype(enum AVCodecID codec)
{
    switch (codec) {
    /* ... per-codec cases returning the matching MFVideoFormat_ or
       MFAudioFormat_ GUID ... */
    default:
        return NULL;
    }
}
static int init_com_mf(void *log)
{
    HRESULT hr;

    hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
    if (hr == RPC_E_CHANGED_MODE) {
        /* COM was already initialized in an incompatible (STA) mode:
           log an error and fail. */
        return AVERROR(EINVAL);
    } else if (FAILED(hr)) {
        /* COM could not be initialized: log an error and fail. */
        return AVERROR(ENOSYS);
    }

    hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
    if (FAILED(hr)) {
        /* MF could not be started: undo CoInitializeEx() and fail. */
        CoUninitialize();
        return AVERROR(ENOSYS);
    }

    return 0;
}
int ff_instantiate_mf(void *log, GUID category,
                      MFT_REGISTER_TYPE_INFO *in_type,
                      MFT_REGISTER_TYPE_INFO *out_type,
                      int use_hw, IMFTransform **res)
{
    HRESULT hr;
    int n, ret;
    UINT32 flags;
    IMFActivate **activate;
    UINT32 num_activate;
    IMFActivate *winner = 0;

    ret = init_com_mf(log);
    if (ret < 0)
        return ret;

    flags = MFT_ENUM_FLAG_SORTANDFILTER;
    if (use_hw) {
        flags |= MFT_ENUM_FLAG_HARDWARE;
    } else {
        flags |= MFT_ENUM_FLAG_SYNCMFT;
    }

    hr = ff_MFTEnumEx(category, flags, in_type, out_type, &activate,
                      &num_activate);
    if (FAILED(hr))
        goto error_uninit_mf;

    for (n = 0; n < num_activate; n++) {
        /* ... log each candidate transform ... */
    }

    /* Activate the first transform that actually instantiates. */
    for (n = 0; n < num_activate; n++) {
        hr = IMFActivate_ActivateObject(activate[n], &IID_IMFTransform,
                                        (void **)res);
        if (!FAILED(hr)) {
            winner = activate[n];
            IMFActivate_AddRef(winner);
            break;
        }
    }

    for (n = 0; n < num_activate; n++)
        IMFActivate_Release(activate[n]);
    CoTaskMemFree(activate);

    if (!winner)
        goto error_uninit_mf;

    IMFAttributes *attrs;
    wchar_t s[512];

    hr = IMFTransform_GetAttributes(*res, &attrs);
    if (!FAILED(hr) && attrs) {
        /* ... inspect and log some of the transform's attributes ... */
        IMFAttributes_Release(attrs);
    }

    hr = IMFActivate_GetString(winner, &MFT_FRIENDLY_NAME_Attribute, s,
                               sizeof(s), NULL);
    /* ... log the chosen transform's friendly name ... */

    IMFActivate_Release(winner);

    return 0;

error_uninit_mf:
    uninit_com_mf();
    return AVERROR(ENOSYS); /* exact error code may differ in the original */
}

void ff_free_mf(IMFTransform **mft)
{
    if (*mft)
        IMFTransform_Release(*mft);
    *mft = NULL;
    uninit_com_mf();
}
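/*
 * Illustrative sketch, not part of mf_utils.c: the typical lifecycle around
 * ff_instantiate_mf()/ff_free_mf(). The function name, category and output
 * type chosen here are hypothetical.
 */
static int example_create_and_destroy_mft(void *log, int use_hw)
{
    IMFTransform *mft = NULL;
    MFT_REGISTER_TYPE_INFO out = { MFMediaType_Video, MFVideoFormat_H264 };
    int ret = ff_instantiate_mf(log, MFT_CATEGORY_VIDEO_ENCODER, NULL, &out,
                                use_hw, &mft);
    if (ret < 0)
        return ret; /* the error path above already undoes init_com_mf() */

    /* ... configure media types, then ProcessInput()/ProcessOutput() ... */

    ff_free_mf(&mft); /* releases the transform and drops the COM/MF reference */
    return 0;
}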