/*
 * FFmpeg — amfenc.c: common code for the AMD AMF (Advanced Media Framework)
 * hardware encoders (H.264 / HEVC / AV1).
 */
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include "config.h"
#include "config_components.h"

#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/hwcontext.h"
#if CONFIG_D3D11VA
#include "libavutil/hwcontext_d3d11va.h"
#endif
#if CONFIG_DXVA2
#define COBJMACROS
#include "libavutil/hwcontext_dxva2.h"
#endif
#include "libavutil/mem.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"

#include "amfenc.h"
#include "encode.h"
#include "internal.h"
41 static int amf_save_hdr_metadata(AVCodecContext *avctx, const AVFrame *frame, AMFHDRMetadata *hdrmeta)
42 {
43  AVFrameSideData *sd_display;
44  AVFrameSideData *sd_light;
45  AVMasteringDisplayMetadata *display_meta;
46  AVContentLightMetadata *light_meta;
47 
49  if (sd_display) {
50  display_meta = (AVMasteringDisplayMetadata *)sd_display->data;
51  if (display_meta->has_luminance) {
52  const unsigned int luma_den = 10000;
53  hdrmeta->maxMasteringLuminance =
54  (amf_uint32)(luma_den * av_q2d(display_meta->max_luminance));
55  hdrmeta->minMasteringLuminance =
56  FFMIN((amf_uint32)(luma_den * av_q2d(display_meta->min_luminance)), hdrmeta->maxMasteringLuminance);
57  }
58  if (display_meta->has_primaries) {
59  const unsigned int chroma_den = 50000;
60  hdrmeta->redPrimary[0] =
61  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][0])), chroma_den);
62  hdrmeta->redPrimary[1] =
63  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][1])), chroma_den);
64  hdrmeta->greenPrimary[0] =
65  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][0])), chroma_den);
66  hdrmeta->greenPrimary[1] =
67  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][1])), chroma_den);
68  hdrmeta->bluePrimary[0] =
69  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][0])), chroma_den);
70  hdrmeta->bluePrimary[1] =
71  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][1])), chroma_den);
72  hdrmeta->whitePoint[0] =
73  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[0])), chroma_den);
74  hdrmeta->whitePoint[1] =
75  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[1])), chroma_den);
76  }
77 
79  if (sd_light) {
80  light_meta = (AVContentLightMetadata *)sd_light->data;
81  if (light_meta) {
82  hdrmeta->maxContentLightLevel = (amf_uint16)light_meta->MaxCLL;
83  hdrmeta->maxFrameAverageLightLevel = (amf_uint16)light_meta->MaxFALL;
84  }
85  }
86  return 0;
87  }
88  return 1;
89 }
90 
91 #if CONFIG_D3D11VA
92 #include <d3d11.h>
93 #endif
94 
95 #ifdef _WIN32
96 #include "compat/w32dlfcn.h"
97 #else
98 #include <dlfcn.h>
99 #endif
100 
101 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
102 
103 #define PTS_PROP L"PtsProp"
104 
108 #if CONFIG_D3D11VA
110 #endif
111 #if CONFIG_DXVA2
113 #endif
116 };
117 
118 typedef struct FormatMap {
120  enum AMF_SURFACE_FORMAT amf_format;
121 } FormatMap;
122 
// Mapping between FFmpeg pixel formats and their AMF surface-format
// equivalents; searched linearly by amf_av_to_amf_format().
static const FormatMap format_map[] =
{
    { AV_PIX_FMT_NONE, AMF_SURFACE_UNKNOWN },
    { AV_PIX_FMT_NV12, AMF_SURFACE_NV12 },
    { AV_PIX_FMT_P010, AMF_SURFACE_P010 },
    { AV_PIX_FMT_BGR0, AMF_SURFACE_BGRA },
    { AV_PIX_FMT_RGB0, AMF_SURFACE_RGBA },
    { AV_PIX_FMT_GRAY8, AMF_SURFACE_GRAY8 },
    { AV_PIX_FMT_YUV420P, AMF_SURFACE_YUV420P },
    { AV_PIX_FMT_YUYV422, AMF_SURFACE_YUY2 },
};
134 
135 static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
136 {
137  int i;
138  for (i = 0; i < amf_countof(format_map); i++) {
139  if (format_map[i].av_format == fmt) {
140  return format_map[i].amf_format;
141  }
142  }
143  return AMF_SURFACE_UNKNOWN;
144 }
145 
// AMFTraceWriter callback: forwards trace messages emitted by the AMF runtime
// to av_log on the AVCodecContext stored in our AmfTraceWriter wrapper.
static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis,
    const wchar_t *scope, const wchar_t *message)
{
    AmfTraceWriter *tracer = (AmfTraceWriter*)pThis;
    av_log(tracer->avctx, AV_LOG_DEBUG, "%ls: %ls", scope, message); // \n is provided from AMF
}
152 
// AMFTraceWriter callback: nothing to flush — Write() above logs immediately.
static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
{
}
156 
// vtable wiring the AMF trace-writer interface to the callbacks above;
// registered with the AMF runtime in amf_init_context().
static AMFTraceWriterVtbl tracer_vtbl =
{
    .Write = AMFTraceWriter_Write,
    .Flush = AMFTraceWriter_Flush,
};
162 
164 {
165  AmfContext *ctx = avctx->priv_data;
166  AMFInit_Fn init_fun;
167  AMFQueryVersion_Fn version_fun;
168  AMF_RESULT res;
169 
170  ctx->delayed_frame = av_frame_alloc();
171  if (!ctx->delayed_frame) {
172  return AVERROR(ENOMEM);
173  }
174  // hardcoded to current HW queue size - will auto-realloc if too small
175  ctx->timestamp_list = av_fifo_alloc2(avctx->max_b_frames + 16, sizeof(int64_t),
177  if (!ctx->timestamp_list) {
178  return AVERROR(ENOMEM);
179  }
180  ctx->dts_delay = 0;
181 
182 
183  ctx->library = dlopen(AMF_DLL_NAMEA, RTLD_NOW | RTLD_LOCAL);
184  AMF_RETURN_IF_FALSE(ctx, ctx->library != NULL,
185  AVERROR_UNKNOWN, "DLL %s failed to open\n", AMF_DLL_NAMEA);
186 
187  init_fun = (AMFInit_Fn)dlsym(ctx->library, AMF_INIT_FUNCTION_NAME);
188  AMF_RETURN_IF_FALSE(ctx, init_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_INIT_FUNCTION_NAME);
189 
190  version_fun = (AMFQueryVersion_Fn)dlsym(ctx->library, AMF_QUERY_VERSION_FUNCTION_NAME);
191  AMF_RETURN_IF_FALSE(ctx, version_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_QUERY_VERSION_FUNCTION_NAME);
192 
193  res = version_fun(&ctx->version);
194  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_QUERY_VERSION_FUNCTION_NAME, res);
195  res = init_fun(AMF_FULL_VERSION, &ctx->factory);
196  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_INIT_FUNCTION_NAME, res);
197  res = ctx->factory->pVtbl->GetTrace(ctx->factory, &ctx->trace);
198  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetTrace() failed with error %d\n", res);
199  res = ctx->factory->pVtbl->GetDebug(ctx->factory, &ctx->debug);
200  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetDebug() failed with error %d\n", res);
201  return 0;
202 }
203 
204 #if CONFIG_D3D11VA
205 static int amf_init_from_d3d11_device(AVCodecContext *avctx, AVD3D11VADeviceContext *hwctx)
206 {
207  AmfContext *ctx = avctx->priv_data;
208  AMF_RESULT res;
209 
210  res = ctx->context->pVtbl->InitDX11(ctx->context, hwctx->device, AMF_DX11_1);
211  if (res != AMF_OK) {
212  if (res == AMF_NOT_SUPPORTED)
213  av_log(avctx, AV_LOG_ERROR, "AMF via D3D11 is not supported on the given device.\n");
214  else
215  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D11 device: %d.\n", res);
216  return AVERROR(ENODEV);
217  }
218 
219  return 0;
220 }
221 #endif
222 
223 #if CONFIG_DXVA2
224 static int amf_init_from_dxva2_device(AVCodecContext *avctx, AVDXVA2DeviceContext *hwctx)
225 {
226  AmfContext *ctx = avctx->priv_data;
227  HANDLE device_handle;
228  IDirect3DDevice9 *device;
229  HRESULT hr;
230  AMF_RESULT res;
231  int ret;
232 
233  hr = IDirect3DDeviceManager9_OpenDeviceHandle(hwctx->devmgr, &device_handle);
234  if (FAILED(hr)) {
235  av_log(avctx, AV_LOG_ERROR, "Failed to open device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
236  return AVERROR_EXTERNAL;
237  }
238 
239  hr = IDirect3DDeviceManager9_LockDevice(hwctx->devmgr, device_handle, &device, FALSE);
240  if (SUCCEEDED(hr)) {
241  IDirect3DDeviceManager9_UnlockDevice(hwctx->devmgr, device_handle, FALSE);
242  ret = 0;
243  } else {
244  av_log(avctx, AV_LOG_ERROR, "Failed to lock device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
246  }
247 
248  IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, device_handle);
249 
250  if (ret < 0)
251  return ret;
252 
253  res = ctx->context->pVtbl->InitDX9(ctx->context, device);
254 
255  IDirect3DDevice9_Release(device);
256 
257  if (res != AMF_OK) {
258  if (res == AMF_NOT_SUPPORTED)
259  av_log(avctx, AV_LOG_ERROR, "AMF via D3D9 is not supported on the given device.\n");
260  else
261  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on given D3D9 device: %d.\n", res);
262  return AVERROR(ENODEV);
263  }
264 
265  return 0;
266 }
267 #endif
268 
270 {
271  AmfContext *ctx = avctx->priv_data;
272  AMFContext1 *context1 = NULL;
273  AMF_RESULT res;
274  av_unused int ret;
275 
276  ctx->hwsurfaces_in_queue = 0;
277 
278  // configure AMF logger
279  // the return of these functions indicates old state and do not affect behaviour
280  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, ctx->log_to_dbg != 0 );
281  if (ctx->log_to_dbg)
282  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, AMF_TRACE_TRACE);
283  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_CONSOLE, 0);
284  ctx->trace->pVtbl->SetGlobalLevel(ctx->trace, AMF_TRACE_TRACE);
285 
286  // connect AMF logger to av_log
287  ctx->tracer.vtbl = &tracer_vtbl;
288  ctx->tracer.avctx = avctx;
289  ctx->trace->pVtbl->RegisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID,(AMFTraceWriter*)&ctx->tracer, 1);
290  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, FFMPEG_AMF_WRITER_ID, AMF_TRACE_TRACE);
291 
292  res = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
293  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext() failed with error %d\n", res);
294 
295  // If a device was passed to the encoder, try to initialise from that.
296  if (avctx->hw_frames_ctx) {
297  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
298 
299  if (amf_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
300  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
301  av_get_pix_fmt_name(frames_ctx->sw_format));
302  return AVERROR(EINVAL);
303  }
304 
305  switch (frames_ctx->device_ctx->type) {
306 #if CONFIG_D3D11VA
308  ret = amf_init_from_d3d11_device(avctx, frames_ctx->device_ctx->hwctx);
309  if (ret < 0)
310  return ret;
311  break;
312 #endif
313 #if CONFIG_DXVA2
315  ret = amf_init_from_dxva2_device(avctx, frames_ctx->device_ctx->hwctx);
316  if (ret < 0)
317  return ret;
318  break;
319 #endif
320  default:
321  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s frames context is not supported.\n",
323  return AVERROR(ENOSYS);
324  }
325 
326  ctx->hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
327  if (!ctx->hw_frames_ctx)
328  return AVERROR(ENOMEM);
329 
330  if (frames_ctx->initial_pool_size > 0)
331  ctx->hwsurfaces_in_queue_max = FFMIN(ctx->hwsurfaces_in_queue_max, frames_ctx->initial_pool_size - 1);
332 
333  } else if (avctx->hw_device_ctx) {
334  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
335 
336  switch (device_ctx->type) {
337 #if CONFIG_D3D11VA
339  ret = amf_init_from_d3d11_device(avctx, device_ctx->hwctx);
340  if (ret < 0)
341  return ret;
342  break;
343 #endif
344 #if CONFIG_DXVA2
346  ret = amf_init_from_dxva2_device(avctx, device_ctx->hwctx);
347  if (ret < 0)
348  return ret;
349  break;
350 #endif
351  default:
352  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s device is not supported.\n",
353  av_hwdevice_get_type_name(device_ctx->type));
354  return AVERROR(ENOSYS);
355  }
356 
357  ctx->hw_device_ctx = av_buffer_ref(avctx->hw_device_ctx);
358  if (!ctx->hw_device_ctx)
359  return AVERROR(ENOMEM);
360 
361  } else {
362  res = ctx->context->pVtbl->InitDX11(ctx->context, NULL, AMF_DX11_1);
363  if (res == AMF_OK) {
364  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D11.\n");
365  } else {
366  res = ctx->context->pVtbl->InitDX9(ctx->context, NULL);
367  if (res == AMF_OK) {
368  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
369  } else {
370  AMFGuid guid = IID_AMFContext1();
371  res = ctx->context->pVtbl->QueryInterface(ctx->context, &guid, (void**)&context1);
372  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext1() failed with error %d\n", res);
373 
374  res = context1->pVtbl->InitVulkan(context1, NULL);
375  context1->pVtbl->Release(context1);
376  if (res != AMF_OK) {
377  if (res == AMF_NOT_SUPPORTED)
378  av_log(avctx, AV_LOG_ERROR, "AMF via Vulkan is not supported on the given device.\n");
379  else
380  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given Vulkan device: %d.\n", res);
381  return AVERROR(ENOSYS);
382  }
383  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via Vulkan.\n");
384  }
385  }
386  }
387  return 0;
388 }
389 
391 {
392  AmfContext *ctx = avctx->priv_data;
393  const wchar_t *codec_id = NULL;
394  AMF_RESULT res;
395  enum AVPixelFormat pix_fmt;
396 
397  switch (avctx->codec->id) {
398  case AV_CODEC_ID_H264:
399  codec_id = AMFVideoEncoderVCE_AVC;
400  break;
401  case AV_CODEC_ID_HEVC:
402  codec_id = AMFVideoEncoder_HEVC;
403  break;
404  case AV_CODEC_ID_AV1 :
405  codec_id = AMFVideoEncoder_AV1;
406  break;
407  default:
408  break;
409  }
410  AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
411 
412  if (ctx->hw_frames_ctx)
413  pix_fmt = ((AVHWFramesContext*)ctx->hw_frames_ctx->data)->sw_format;
414  else
415  pix_fmt = avctx->pix_fmt;
416 
417  if (pix_fmt == AV_PIX_FMT_P010) {
418  AMF_RETURN_IF_FALSE(ctx, ctx->version >= AMF_MAKE_FULL_VERSION(1, 4, 32, 0), AVERROR_UNKNOWN, "10-bit encoder is not supported by AMD GPU drivers versions lower than 23.30.\n");
419  }
420 
421  ctx->format = amf_av_to_amf_format(pix_fmt);
422  AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
423  "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
424 
425  res = ctx->factory->pVtbl->CreateComponent(ctx->factory, ctx->context, codec_id, &ctx->encoder);
426  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
427 
428  ctx->submitted_frame = 0;
429 
430  return 0;
431 }
432 
434 {
435  AmfContext *ctx = avctx->priv_data;
436 
437  if (ctx->delayed_surface) {
438  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
439  ctx->delayed_surface = NULL;
440  }
441 
442  if (ctx->encoder) {
443  ctx->encoder->pVtbl->Terminate(ctx->encoder);
444  ctx->encoder->pVtbl->Release(ctx->encoder);
445  ctx->encoder = NULL;
446  }
447 
448  if (ctx->context) {
449  ctx->context->pVtbl->Terminate(ctx->context);
450  ctx->context->pVtbl->Release(ctx->context);
451  ctx->context = NULL;
452  }
453  av_buffer_unref(&ctx->hw_device_ctx);
454  av_buffer_unref(&ctx->hw_frames_ctx);
455 
456  if (ctx->trace) {
457  ctx->trace->pVtbl->UnregisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID);
458  }
459  if (ctx->library) {
460  dlclose(ctx->library);
461  ctx->library = NULL;
462  }
463  ctx->trace = NULL;
464  ctx->debug = NULL;
465  ctx->factory = NULL;
466  ctx->version = 0;
467  ctx->delayed_drain = 0;
468  av_frame_free(&ctx->delayed_frame);
469  av_fifo_freep2(&ctx->timestamp_list);
470 
471  return 0;
472 }
473 
474 static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
475  AMFSurface* surface)
476 {
477  AMFPlane *plane;
478  uint8_t *dst_data[4];
479  int dst_linesize[4];
480  int planes;
481  int i;
482 
483  planes = surface->pVtbl->GetPlanesCount(surface);
484  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
485 
486  for (i = 0; i < planes; i++) {
487  plane = surface->pVtbl->GetPlaneAt(surface, i);
488  dst_data[i] = plane->pVtbl->GetNative(plane);
489  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
490  }
491  av_image_copy2(dst_data, dst_linesize,
492  frame->data, frame->linesize, frame->format,
493  avctx->width, avctx->height);
494 
495  return 0;
496 }
497 
498 static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
499 {
500  AmfContext *ctx = avctx->priv_data;
501  int ret;
502  AMFVariantStruct var = {0};
503  int64_t timestamp = AV_NOPTS_VALUE;
504  int64_t size = buffer->pVtbl->GetSize(buffer);
505 
506  if ((ret = ff_get_encode_buffer(avctx, pkt, size, 0)) < 0) {
507  return ret;
508  }
509  memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);
510 
511  switch (avctx->codec->id) {
512  case AV_CODEC_ID_H264:
513  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
514  if(var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
516  }
517  break;
518  case AV_CODEC_ID_HEVC:
519  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
520  if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
522  }
523  break;
524  case AV_CODEC_ID_AV1:
525  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE, &var);
526  if (var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_KEY) {
528  }
529  default:
530  break;
531  }
532 
533  buffer->pVtbl->GetProperty(buffer, PTS_PROP, &var);
534 
535  pkt->pts = var.int64Value; // original pts
536 
537 
538  AMF_RETURN_IF_FALSE(ctx, av_fifo_read(ctx->timestamp_list, &timestamp, 1) >= 0,
539  AVERROR_UNKNOWN, "timestamp_list is empty\n");
540 
541  // calc dts shift if max_b_frames > 0
542  if ((ctx->max_b_frames > 0 || ((ctx->pa_adaptive_mini_gop == 1) ? true : false)) && ctx->dts_delay == 0) {
543  int64_t timestamp_last = AV_NOPTS_VALUE;
544  size_t can_read = av_fifo_can_read(ctx->timestamp_list);
545  AMF_RETURN_IF_FALSE(ctx, can_read > 0, AVERROR_UNKNOWN,
546  "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
547  av_fifo_peek(ctx->timestamp_list, &timestamp_last, 1, can_read - 1);
548  if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
549  return AVERROR(ERANGE);
550  }
551  ctx->dts_delay = timestamp_last - timestamp;
552  }
553  pkt->dts = timestamp - ctx->dts_delay;
554  return 0;
555 }
556 
557 // amfenc API implementation
559 {
560  int ret;
561 
562  if ((ret = amf_load_library(avctx)) == 0) {
563  if ((ret = amf_init_context(avctx)) == 0) {
564  if ((ret = amf_init_encoder(avctx)) == 0) {
565  return 0;
566  }
567  }
568  }
569  ff_amf_encode_close(avctx);
570  return ret;
571 }
572 
573 static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
574 {
575  AMF_RESULT res;
576  AMFVariantStruct var;
577  res = AMFVariantInit(&var);
578  if (res == AMF_OK) {
579  AMFGuid guid_AMFInterface = IID_AMFInterface();
580  AMFInterface *amf_interface;
581  res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
582 
583  if (res == AMF_OK) {
584  res = AMFVariantAssignInterface(&var, amf_interface);
585  amf_interface->pVtbl->Release(amf_interface);
586  }
587  if (res == AMF_OK) {
588  res = object->pVtbl->SetProperty(object, name, var);
589  }
590  AMFVariantClear(&var);
591  }
592  return res;
593 }
594 
595 static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
596 {
597  AMF_RESULT res;
598  AMFVariantStruct var;
599  res = AMFVariantInit(&var);
600  if (res == AMF_OK) {
601  res = object->pVtbl->GetProperty(object, name, &var);
602  if (res == AMF_OK) {
603  if (var.type == AMF_VARIANT_INTERFACE) {
604  AMFGuid guid_AMFBuffer = IID_AMFBuffer();
605  AMFInterface *amf_interface = AMFVariantInterface(&var);
606  res = amf_interface->pVtbl->QueryInterface(amf_interface, &guid_AMFBuffer, (void**)val);
607  } else {
608  res = AMF_INVALID_DATA_TYPE;
609  }
610  }
611  AMFVariantClear(&var);
612  }
613  return res;
614 }
615 
616 static AMFBuffer *amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
617 {
619  AMFBuffer *frame_ref_storage_buffer = NULL;
620  AMF_RESULT res;
621 
622  res = context->pVtbl->AllocBuffer(context, AMF_MEMORY_HOST, sizeof(frame_ref), &frame_ref_storage_buffer);
623  if (res == AMF_OK) {
625  if (frame_ref) {
626  memcpy(frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), &frame_ref, sizeof(frame_ref));
627  } else {
628  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
629  frame_ref_storage_buffer = NULL;
630  }
631  }
632  return frame_ref_storage_buffer;
633 }
634 
635 static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
636 {
638  memcpy(&frame_ref, frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), sizeof(frame_ref));
640  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
641 }
642 
644 {
645  AmfContext *ctx = avctx->priv_data;
646  AMFSurface *surface;
647  AMF_RESULT res;
648  int ret;
649  AMF_RESULT res_query;
650  AMFData *data = NULL;
651  AVFrame *frame = ctx->delayed_frame;
652  int block_and_wait;
653  int query_output_data_flag = 0;
654  AMF_RESULT res_resubmit;
655 
656  if (!ctx->encoder)
657  return AVERROR(EINVAL);
658 
659  if (!frame->buf[0]) {
660  ret = ff_encode_get_frame(avctx, frame);
661  if (ret < 0 && ret != AVERROR_EOF)
662  return ret;
663  }
664 
665  if (!frame->buf[0]) { // submit drain
666  if (!ctx->eof) { // submit drain one time only
667  if (ctx->delayed_surface != NULL) {
668  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
669  } else if(!ctx->delayed_drain) {
670  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
671  if (res == AMF_INPUT_FULL) {
672  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
673  } else {
674  if (res == AMF_OK) {
675  ctx->eof = 1; // drain started
676  }
677  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
678  }
679  }
680  }
681  } else if (!ctx->delayed_surface) { // submit frame
682  int hw_surface = 0;
683 
684  // prepare surface from frame
685  switch (frame->format) {
686 #if CONFIG_D3D11VA
687  case AV_PIX_FMT_D3D11:
688  {
689  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
690  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
691  int index = (intptr_t)frame->data[1]; // index is a slice in texture array is - set to tell AMF which slice to use
692 
693  av_assert0(frame->hw_frames_ctx && ctx->hw_frames_ctx &&
694  frame->hw_frames_ctx->data == ctx->hw_frames_ctx->data);
695 
696  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
697 
698  res = ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
699  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
700 
701  hw_surface = 1;
702  }
703  break;
704 #endif
705 #if CONFIG_DXVA2
707  {
708  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
709 
710  res = ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
711  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
712 
713  hw_surface = 1;
714  }
715  break;
716 #endif
717  default:
718  {
719  res = ctx->context->pVtbl->AllocSurface(ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
720  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
721  amf_copy_surface(avctx, frame, surface);
722  }
723  break;
724  }
725 
726  if (hw_surface) {
727  AMFBuffer *frame_ref_storage_buffer;
728 
729  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
730  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
731 
732  frame_ref_storage_buffer = amf_create_buffer_with_frame_ref(frame, ctx->context);
733  AMF_RETURN_IF_FALSE(ctx, frame_ref_storage_buffer != NULL, AVERROR(ENOMEM), "create_buffer_with_frame_ref() returned NULL\n");
734 
735  res = amf_set_property_buffer(surface, L"av_frame_ref", frame_ref_storage_buffer);
736  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_ref\" with error %d\n", res);
737  ctx->hwsurfaces_in_queue++;
738  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
739  }
740 
741  // HDR10 metadata
742  if (frame->color_trc == AVCOL_TRC_SMPTE2084) {
743  AMFBuffer * hdrmeta_buffer = NULL;
744  res = ctx->context->pVtbl->AllocBuffer(ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
745  if (res == AMF_OK) {
746  AMFHDRMetadata * hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
747  if (amf_save_hdr_metadata(avctx, frame, hdrmeta) == 0) {
748  switch (avctx->codec->id) {
749  case AV_CODEC_ID_H264:
750  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_INPUT_HDR_METADATA, hdrmeta_buffer); break;
751  case AV_CODEC_ID_HEVC:
752  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_HEVC_INPUT_HDR_METADATA, hdrmeta_buffer); break;
753  case AV_CODEC_ID_AV1:
754  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_AV1_INPUT_HDR_METADATA, hdrmeta_buffer); break;
755  }
756  res = amf_set_property_buffer(surface, L"av_frame_hdrmeta", hdrmeta_buffer);
757  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_hdrmeta\" with error %d\n", res);
758  }
759  hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
760  }
761  }
762 
763  surface->pVtbl->SetPts(surface, frame->pts);
764  AMF_ASSIGN_PROPERTY_INT64(res, surface, PTS_PROP, frame->pts);
765 
766  switch (avctx->codec->id) {
767  case AV_CODEC_ID_H264:
768  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
769  switch (frame->pict_type) {
770  case AV_PICTURE_TYPE_I:
771  if (ctx->forced_idr) {
772  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_SPS, 1);
773  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_PPS, 1);
774  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_IDR);
775  } else {
776  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_I);
777  }
778  break;
779  case AV_PICTURE_TYPE_P:
780  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_P);
781  break;
782  case AV_PICTURE_TYPE_B:
783  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_B);
784  break;
785  }
786  break;
787  case AV_CODEC_ID_HEVC:
788  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
789  switch (frame->pict_type) {
790  case AV_PICTURE_TYPE_I:
791  if (ctx->forced_idr) {
792  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_HEADER, 1);
793  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_IDR);
794  } else {
795  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_I);
796  }
797  break;
798  case AV_PICTURE_TYPE_P:
799  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_P);
800  break;
801  }
802  break;
803  case AV_CODEC_ID_AV1:
804  if (frame->pict_type == AV_PICTURE_TYPE_I) {
805  if (ctx->forced_idr) {
806  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_INSERT_SEQUENCE_HEADER, 1);
807  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_KEY);
808  } else {
809  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_INTRA_ONLY);
810  }
811  }
812  break;
813  default:
814  break;
815  }
816 
817  // submit surface
818  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
819  if (res == AMF_INPUT_FULL) { // handle full queue
820  //store surface for later submission
821  ctx->delayed_surface = surface;
822  } else {
823  int64_t pts = frame->pts;
824  surface->pVtbl->Release(surface);
825  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
826 
828  ret = av_fifo_write(ctx->timestamp_list, &pts, 1);
829 
830  if (ctx->submitted_frame == 0)
831  {
832  ctx->use_b_frame = (ctx->max_b_frames > 0 || ((ctx->pa_adaptive_mini_gop == 1) ? true : false));
833  }
834  ctx->submitted_frame++;
835 
836  if (ret < 0)
837  return ret;
838  }
839  }
840 
841 
842  do {
843  block_and_wait = 0;
844  // poll data
845  if (!avpkt->data && !avpkt->buf && (ctx->use_b_frame ? (ctx->submitted_frame >= 2) : true) ) {
846  res_query = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
847  if (data) {
848  // copy data to packet
849  AMFBuffer *buffer;
850  AMFGuid guid = IID_AMFBuffer();
851  query_output_data_flag = 1;
852  data->pVtbl->QueryInterface(data, &guid, (void**)&buffer); // query for buffer interface
853  ret = amf_copy_buffer(avctx, avpkt, buffer);
854 
855  ctx->submitted_frame++;
856  buffer->pVtbl->Release(buffer);
857 
858  if (data->pVtbl->HasProperty(data, L"av_frame_ref")) {
859  AMFBuffer* frame_ref_storage_buffer;
860  res = amf_get_property_buffer(data, L"av_frame_ref", &frame_ref_storage_buffer);
861  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetProperty failed for \"av_frame_ref\" with error %d\n", res);
862  amf_release_buffer_with_frame_ref(frame_ref_storage_buffer);
863  ctx->hwsurfaces_in_queue--;
864  }
865 
866  data->pVtbl->Release(data);
867 
868  AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);
869  }
870  }
871  res_resubmit = AMF_OK;
872  if (ctx->delayed_surface != NULL) { // try to resubmit frame
873  if (ctx->delayed_surface->pVtbl->HasProperty(ctx->delayed_surface, L"av_frame_hdrmeta")) {
874  AMFBuffer * hdrmeta_buffer = NULL;
875  res = amf_get_property_buffer((AMFData *)ctx->delayed_surface, L"av_frame_hdrmeta", &hdrmeta_buffer);
876  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "GetProperty failed for \"av_frame_hdrmeta\" with error %d\n", res);
877  switch (avctx->codec->id) {
878  case AV_CODEC_ID_H264:
879  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_INPUT_HDR_METADATA, hdrmeta_buffer); break;
880  case AV_CODEC_ID_HEVC:
881  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_HEVC_INPUT_HDR_METADATA, hdrmeta_buffer); break;
882  case AV_CODEC_ID_AV1:
883  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_AV1_INPUT_HDR_METADATA, hdrmeta_buffer); break;
884  }
885  hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
886  }
887  res_resubmit = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)ctx->delayed_surface);
888  if (res_resubmit != AMF_INPUT_FULL) {
889  int64_t pts = ctx->delayed_surface->pVtbl->GetPts(ctx->delayed_surface);
890  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
891  ctx->delayed_surface = NULL;
892  av_frame_unref(ctx->delayed_frame);
893  AMF_RETURN_IF_FALSE(ctx, res_resubmit == AMF_OK, AVERROR_UNKNOWN, "Repeated SubmitInput() failed with error %d\n", res_resubmit);
894 
895  ctx->submitted_frame++;
896  ret = av_fifo_write(ctx->timestamp_list, &pts, 1);
897  if (ret < 0)
898  return ret;
899  }
900  } else if (ctx->delayed_drain) { // try to resubmit drain
901  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
902  if (res != AMF_INPUT_FULL) {
903  ctx->delayed_drain = 0;
904  ctx->eof = 1; // drain started
905  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
906  } else {
907  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL- should not happen\n");
908  }
909  }
910 
911  if (query_output_data_flag == 0) {
912  if (res_resubmit == AMF_INPUT_FULL || ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max)) {
913  block_and_wait = 1;
914 
915  // Only sleep if the driver doesn't support waiting in QueryOutput()
916  // or if we already have output data so we will skip calling it.
917  if (!ctx->query_timeout_supported || avpkt->data || avpkt->buf) {
918  av_usleep(1000);
919  }
920  }
921  }
922  } while (block_and_wait);
923 
924  if (res_query == AMF_EOF) {
925  ret = AVERROR_EOF;
926  } else if (data == NULL) {
927  ret = AVERROR(EAGAIN);
928  } else {
929  ret = 0;
930  }
931  return ret;
932 }
933 
935 {
936  amf_int64 color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;
937  if (avctx->color_range == AVCOL_RANGE_JPEG) {
938  /// Color Space for Full (JPEG) Range
939  switch (avctx->colorspace) {
940  case AVCOL_SPC_SMPTE170M:
941  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_601;
942  break;
943  case AVCOL_SPC_BT709:
944  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_709;
945  break;
947  case AVCOL_SPC_BT2020_CL:
948  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020;
949  break;
950  }
951  } else {
952  /// Color Space for Limited (MPEG) range
953  switch (avctx->colorspace) {
954  case AVCOL_SPC_SMPTE170M:
955  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_601;
956  break;
957  case AVCOL_SPC_BT709:
958  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_709;
959  break;
961  case AVCOL_SPC_BT2020_CL:
962  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020;
963  break;
964  }
965  }
966  return color_profile;
967 }
968 
970 #if CONFIG_D3D11VA
971  HW_CONFIG_ENCODER_FRAMES(D3D11, D3D11VA),
972  HW_CONFIG_ENCODER_DEVICE(NONE, D3D11VA),
973 #endif
974 #if CONFIG_DXVA2
975  HW_CONFIG_ENCODER_FRAMES(DXVA2_VLD, DXVA2),
977 #endif
978  NULL,
979 };
AVMasteringDisplayMetadata::has_primaries
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
Definition: mastering_display_metadata.h:62
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:85
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:215
AVMasteringDisplayMetadata::max_luminance
AVRational max_luminance
Max luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:57
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
AMFTraceWriter_Write
static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis, const wchar_t *scope, const wchar_t *message)
Definition: amfenc.c:146
AVCodecContext::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:699
av_frame_get_side_data
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:963
FFMPEG_AMF_WRITER_ID
#define FFMPEG_AMF_WRITER_ID
Definition: amfenc.c:101
message
Definition: api-threadmessage-test.c:47
NONE
@ NONE
Definition: af_afade.c:60
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVMasteringDisplayMetadata::display_primaries
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
Definition: mastering_display_metadata.h:42
AVMasteringDisplayMetadata::has_luminance
int has_luminance
Flag indicating whether the luminance (min_ and max_) have been set.
Definition: mastering_display_metadata.h:67
int64_t
long long int64_t
Definition: coverity.c:34
av_unused
#define av_unused
Definition: attributes.h:131
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:162
AVContentLightMetadata::MaxCLL
unsigned MaxCLL
Max content light level (cd/m^2).
Definition: mastering_display_metadata.h:111
av_fifo_peek
int av_fifo_peek(const AVFifo *f, void *buf, size_t nb_elems, size_t offset)
Read data from a FIFO without modifying FIFO state.
Definition: fifo.c:255
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:389
pixdesc.h
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:717
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:539
encode.h
data
const char data[16]
Definition: mxf.c:149
planes
static const struct @467 planes[]
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:225
amf_set_property_buffer
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
Definition: amfenc.c:573
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:185
amf_copy_surface
static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame, AMFSurface *surface)
Definition: amfenc.c:474
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AMFTraceWriter_Flush
static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
Definition: amfenc.c:153
AVCOL_SPC_BT2020_CL
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
Definition: pixfmt.h:652
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:594
FormatMap::amf_format
enum AMF_SURFACE_FORMAT amf_format
Definition: amfenc.c:120
AVContentLightMetadata
Content light level needed by to transmit HDR over HDMI (CTA-861.3).
Definition: mastering_display_metadata.h:107
AVCodecContext::codec
const struct AVCodec * codec
Definition: avcodec.h:460
av_fifo_write
int av_fifo_write(AVFifo *f, const void *buf, size_t nb_elems)
Write data into a FIFO.
Definition: fifo.c:188
ff_amf_encode_close
int av_cold ff_amf_encode_close(AVCodecContext *avctx)
Common encoder termination function.
Definition: amfenc.c:433
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
ff_amf_encode_init
int ff_amf_encode_init(AVCodecContext *avctx)
Common encoder initialization function.
Definition: amfenc.c:558
val
static double val(void *priv, double ch)
Definition: aeval.c:77
pts
static int64_t pts
Definition: transcode_aac.c:644
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:60
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:150
avassert.h
pkt
AVPacket * pkt
Definition: movenc.c:60
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:90
av_fifo_read
int av_fifo_read(AVFifo *f, void *buf, size_t nb_elems)
Read data from a FIFO.
Definition: fifo.c:240
AVMasteringDisplayMetadata::white_point
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
Definition: mastering_display_metadata.h:47
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:647
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
av_q2d
static double av_q2d(AVRational a)
Convert an AVRational to a double.
Definition: rational.h:104
amf_av_to_amf_format
static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
Definition: amfenc.c:135
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
amf_init_encoder
static int amf_init_encoder(AVCodecContext *avctx)
Definition: amfenc.c:390
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:230
ctx
AVFormatContext * ctx
Definition: movenc.c:49
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:609
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:112
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:410
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
AV_CODEC_ID_H264
@ AV_CODEC_ID_H264
Definition: codec_id.h:79
AmfTraceWriter::avctx
AVCodecContext * avctx
Definition: amfenc.h:43
if
if(ret)
Definition: filter_design.txt:179
context
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option keep it simple and lowercase description are in without and describe what they for example set the foo of the bar offset is the offset of the field in your context
Definition: writing_filters.txt:91
tracer_vtbl
static AMFTraceWriterVtbl tracer_vtbl
Definition: amfenc.c:157
AVPacket::buf
AVBufferRef * buf
A reference to the reference-counted buffer where the packet data is stored.
Definition: packet.h:522
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:210
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:709
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_CODEC_ID_AV1
@ AV_CODEC_ID_AV1
Definition: codec_id.h:284
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
ff_amf_receive_packet
int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
Encoding one frame - common function for all AMF encoders.
Definition: amfenc.c:643
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:279
av_fifo_can_read
size_t av_fifo_can_read(const AVFifo *f)
Definition: fifo.c:87
amf_copy_buffer
static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
Definition: amfenc.c:498
AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
@ AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
Mastering display metadata associated with a video frame.
Definition: frame.h:120
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:265
time.h
PTS_PROP
#define PTS_PROP
Definition: amfenc.c:103
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
FormatMap::av_format
enum AVPixelFormat av_format
Definition: amfenc.c:119
index
int index
Definition: gxfenc.c:90
AmfTraceWriter
AMF trace writer callback class Used to capture all AMF logging.
Definition: amfenc.h:41
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:628
hwcontext_dxva2.h
HW_CONFIG_ENCODER_DEVICE
#define HW_CONFIG_ENCODER_DEVICE(format, device_type_)
Definition: hwconfig.h:95
ff_amf_pix_fmts
enum AVPixelFormat ff_amf_pix_fmts[]
Supported formats.
Definition: amfenc.c:105
size
int size
Definition: twinvq_data.h:10344
AV_NOPTS_VALUE
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:248
AVFrameSideData::data
uint8_t * data
Definition: frame.h:267
AVCodecHWConfigInternal
Definition: hwconfig.h:25
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:538
amf_create_buffer_with_frame_ref
static AMFBuffer * amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
Definition: amfenc.c:616
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:545
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:263
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
AVCodec::id
enum AVCodecID id
Definition: codec.h:201
HW_CONFIG_ENCODER_FRAMES
#define HW_CONFIG_ENCODER_FRAMES(format, device_type_)
Definition: hwconfig.h:98
AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
@ AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
Content light level (based on CTA-861.3).
Definition: frame.h:137
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:532
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:651
amf_save_hdr_metadata
static int amf_save_hdr_metadata(AVCodecContext *avctx, const AVFrame *frame, AMFHDRMetadata *hdrmeta)
Definition: amfenc.c:41
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
ff_amfenc_hw_configs
const AVCodecHWConfigInternal *const ff_amfenc_hw_configs[]
Definition: amfenc.c:969
amf_load_library
static int amf_load_library(AVCodecContext *avctx)
Definition: amfenc.c:163
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
AV_CODEC_ID_HEVC
@ AV_CODEC_ID_HEVC
Definition: codec_id.h:228
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:622
AVCodecContext::hw_device_ctx
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:1515
AVMasteringDisplayMetadata
Mastering display metadata capable of representing the color volume of the display used to master the...
Definition: mastering_display_metadata.h:38
AVCodecContext::height
int height
Definition: avcodec.h:632
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:671
AVCodecContext::hw_frames_ctx
AVBufferRef * hw_frames_ctx
A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames.
Definition: avcodec.h:1493
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:115
frame_ref
static int frame_ref(AVFrame *dst, const AVFrame *src)
Definition: swscale.c:1331
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:72
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:134
FormatMap
Definition: amfenc.c:118
av_fifo_alloc2
AVFifo * av_fifo_alloc2(size_t nb_elems, size_t elem_size, unsigned int flags)
Allocate and initialize an AVFifo with a given element size.
Definition: fifo.c:47
AVFormatContext::debug
int debug
Flags to enable debugging.
Definition: avformat.h:1578
AVCodecContext
main external API structure.
Definition: avcodec.h:451
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:281
buffer
the frame and frame reference mechanism is intended to as much as expensive copies of that data while still allowing the filters to produce correct results The data is stored in buffers represented by AVFrame structures Several references can point to the same frame buffer
Definition: filter_design.txt:49
ff_get_encode_buffer
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
Definition: encode.c:106
av_image_copy2
static void av_image_copy2(uint8_t *const dst_data[4], const int dst_linesizes[4], uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Wrapper around av_image_copy() to workaround the limitation that the conversion from uint8_t * const ...
Definition: imgutils.h:184
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
amf_get_property_buffer
static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
Definition: amfenc.c:595
L
#define L(x)
Definition: vpx_arith.h:36
amfenc.h
AVMasteringDisplayMetadata::min_luminance
AVRational min_luminance
Min luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:52
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:552
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:187
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:280
AVERROR_ENCODER_NOT_FOUND
#define AVERROR_ENCODER_NOT_FOUND
Encoder not found.
Definition: error.h:56
mem.h
AVCodecContext::max_b_frames
int max_b_frames
maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 re...
Definition: avcodec.h:809
ff_encode_get_frame
int ff_encode_get_frame(AVCodecContext *avctx, AVFrame *frame)
Called by encoders to get the next frame for encoding.
Definition: encode.c:205
mastering_display_metadata.h
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:265
ff_amf_get_color_profile
int ff_amf_get_color_profile(AVCodecContext *avctx)
Definition: amfenc.c:934
format_map
static const FormatMap format_map[]
Definition: amfenc.c:123
AVPacket
This structure stores compressed data.
Definition: packet.h:516
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:478
AVContentLightMetadata::MaxFALL
unsigned MaxFALL
Max average light level per frame (cd/m^2).
Definition: mastering_display_metadata.h:116
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:632
imgutils.h
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AmfContext
AMF encoder context.
Definition: amfenc.h:50
av_fifo_freep2
void av_fifo_freep2(AVFifo **f)
Free an AVFifo and reset pointer to NULL.
Definition: fifo.c:286
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:642
amf_release_buffer_with_frame_ref
static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
Definition: amfenc.c:635
hwcontext_d3d11va.h
AV_FIFO_FLAG_AUTO_GROW
#define AV_FIFO_FLAG_AUTO_GROW
Automatically resize the FIFO on writes, so that the data fits.
Definition: fifo.h:63
w32dlfcn.h
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3090
amf_init_context
static int amf_init_context(AVCodecContext *avctx)
Definition: amfenc.c:269