FFmpeg
amfenc.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include "libavutil/avassert.h"
22 #include "libavutil/imgutils.h"
23 #include "libavutil/hwcontext.h"
24 #if CONFIG_D3D11VA
25 #include "libavutil/hwcontext_d3d11va.h"
26 #endif
27 #if CONFIG_DXVA2
28 #define COBJMACROS
29 #include "libavutil/hwcontext_dxva2.h"
30 #endif
31 #include "libavutil/mem.h"
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/time.h"
34 
35 #include "amfenc.h"
36 #include "encode.h"
37 #include "internal.h"
38 
39 #if CONFIG_D3D11VA
40 #include <d3d11.h>
41 #endif
42 
43 #ifdef _WIN32
44 #include "compat/w32dlfcn.h"
45 #else
46 #include <dlfcn.h>
47 #endif
48 
49 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
50 
51 #define PTS_PROP L"PtsProp"
52 
53 const enum AVPixelFormat ff_amf_pix_fmts[] = {
54  AV_PIX_FMT_NV12,
55  AV_PIX_FMT_YUV420P,
56 #if CONFIG_D3D11VA
57  AV_PIX_FMT_D3D11,
58 #endif
59 #if CONFIG_DXVA2
60  AV_PIX_FMT_DXVA2_VLD,
61 #endif
62  AV_PIX_FMT_NONE
63 };
64 
65 typedef struct FormatMap {
66  enum AVPixelFormat av_format;
67  enum AMF_SURFACE_FORMAT amf_format;
68 } FormatMap;
69 
70 static const FormatMap format_map[] =
71 {
72  { AV_PIX_FMT_NONE, AMF_SURFACE_UNKNOWN },
73  { AV_PIX_FMT_NV12, AMF_SURFACE_NV12 },
74  { AV_PIX_FMT_BGR0, AMF_SURFACE_BGRA },
75  { AV_PIX_FMT_RGB0, AMF_SURFACE_RGBA },
76  { AV_PIX_FMT_GRAY8, AMF_SURFACE_GRAY8 },
77  { AV_PIX_FMT_YUV420P, AMF_SURFACE_YUV420P },
78  { AV_PIX_FMT_YUYV422, AMF_SURFACE_YUY2 },
79 };
80 
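// Translate an FFmpeg pixel format into the corresponding AMF surface format
// using format_map; returns AMF_SURFACE_UNKNOWN for unsupported formats.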
81 static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
82 {
83  int i;
84  for (i = 0; i < amf_countof(format_map); i++) {
85  if (format_map[i].av_format == fmt) {
86  return format_map[i].amf_format;
87  }
88  }
89  return AMF_SURFACE_UNKNOWN;
90 }
91 
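// AMFTraceWriter callbacks: forward AMF's internal trace output to av_log()
// so AMF runtime messages appear in the normal FFmpeg log; Flush is a no-op.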
92 static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis,
93  const wchar_t *scope, const wchar_t *message)
94 {
95  AmfTraceWriter *tracer = (AmfTraceWriter*)pThis;
96  av_log(tracer->avctx, AV_LOG_DEBUG, "%ls: %ls", scope, message); // \n is provided from AMF
97 }
98 
99 static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
100 {
101 }
102 
103 static AMFTraceWriterVtbl tracer_vtbl =
104 {
105  .Write = AMFTraceWriter_Write,
106  .Flush = AMFTraceWriter_Flush,
107 };
108 
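// Load the AMF runtime library at run time, resolve the AMFInit and
// AMFQueryVersion entry points and fetch the factory, trace and debug interfaces.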
109 static int amf_load_library(AVCodecContext *avctx)
110 {
111  AmfContext *ctx = avctx->priv_data;
112  AMFInit_Fn init_fun;
113  AMFQueryVersion_Fn version_fun;
114  AMF_RESULT res;
115 
116  ctx->delayed_frame = av_frame_alloc();
117  if (!ctx->delayed_frame) {
118  return AVERROR(ENOMEM);
119  }
120  // hardcoded to current HW queue size - will realloc in timestamp_queue_enqueue() if too small
121  ctx->timestamp_list = av_fifo_alloc((avctx->max_b_frames + 16) * sizeof(int64_t));
122  if (!ctx->timestamp_list) {
123  return AVERROR(ENOMEM);
124  }
125  ctx->dts_delay = 0;
126 
127 
128  ctx->library = dlopen(AMF_DLL_NAMEA, RTLD_NOW | RTLD_LOCAL);
129  AMF_RETURN_IF_FALSE(ctx, ctx->library != NULL,
130  AVERROR_UNKNOWN, "DLL %s failed to open\n", AMF_DLL_NAMEA);
131 
132  init_fun = (AMFInit_Fn)dlsym(ctx->library, AMF_INIT_FUNCTION_NAME);
133  AMF_RETURN_IF_FALSE(ctx, init_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_INIT_FUNCTION_NAME);
134 
135  version_fun = (AMFQueryVersion_Fn)dlsym(ctx->library, AMF_QUERY_VERSION_FUNCTION_NAME);
136  AMF_RETURN_IF_FALSE(ctx, version_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_QUERY_VERSION_FUNCTION_NAME);
137 
138  res = version_fun(&ctx->version);
139  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_QUERY_VERSION_FUNCTION_NAME, res);
140  res = init_fun(AMF_FULL_VERSION, &ctx->factory);
141  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_INIT_FUNCTION_NAME, res);
142  res = ctx->factory->pVtbl->GetTrace(ctx->factory, &ctx->trace);
143  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetTrace() failed with error %d\n", res);
144  res = ctx->factory->pVtbl->GetDebug(ctx->factory, &ctx->debug);
145  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetDebug() failed with error %d\n", res);
146  return 0;
147 }
148 
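// Initialise the AMF context from an existing D3D11 device provided by an
// FFmpeg D3D11VA device context.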
149 #if CONFIG_D3D11VA
150 static int amf_init_from_d3d11_device(AVCodecContext *avctx, AVD3D11VADeviceContext *hwctx)
151 {
152  AmfContext *ctx = avctx->priv_data;
153  AMF_RESULT res;
154 
155  res = ctx->context->pVtbl->InitDX11(ctx->context, hwctx->device, AMF_DX11_1);
156  if (res != AMF_OK) {
157  if (res == AMF_NOT_SUPPORTED)
158  av_log(avctx, AV_LOG_ERROR, "AMF via D3D11 is not supported on the given device.\n");
159  else
160  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D11 device: %d.\n", res);
161  return AVERROR(ENODEV);
162  }
163 
164  return 0;
165 }
166 #endif
167 
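// Initialise the AMF context from an existing DXVA2/D3D9 device: briefly lock
// the device through the device manager to obtain the IDirect3DDevice9 and
// hand it to InitDX9().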
168 #if CONFIG_DXVA2
169 static int amf_init_from_dxva2_device(AVCodecContext *avctx, AVDXVA2DeviceContext *hwctx)
170 {
171  AmfContext *ctx = avctx->priv_data;
172  HANDLE device_handle;
173  IDirect3DDevice9 *device;
174  HRESULT hr;
175  AMF_RESULT res;
176  int ret;
177 
178  hr = IDirect3DDeviceManager9_OpenDeviceHandle(hwctx->devmgr, &device_handle);
179  if (FAILED(hr)) {
180  av_log(avctx, AV_LOG_ERROR, "Failed to open device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
181  return AVERROR_EXTERNAL;
182  }
183 
184  hr = IDirect3DDeviceManager9_LockDevice(hwctx->devmgr, device_handle, &device, FALSE);
185  if (SUCCEEDED(hr)) {
186  IDirect3DDeviceManager9_UnlockDevice(hwctx->devmgr, device_handle, FALSE);
187  ret = 0;
188  } else {
189  av_log(avctx, AV_LOG_ERROR, "Failed to lock device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
190  ret = AVERROR_EXTERNAL;
191  }
192 
193  IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, device_handle);
194 
195  if (ret < 0)
196  return ret;
197 
198  res = ctx->context->pVtbl->InitDX9(ctx->context, device);
199 
200  IDirect3DDevice9_Release(device);
201 
202  if (res != AMF_OK) {
203  if (res == AMF_NOT_SUPPORTED)
204  av_log(avctx, AV_LOG_ERROR, "AMF via D3D9 is not supported on the given device.\n");
205  else
206  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on given D3D9 device: %d.\n", res);
207  return AVERROR(ENODEV);
208  }
209 
210  return 0;
211 }
212 #endif
213 
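// Create the AMF context and bind it to a device: use the device backing
// avctx->hw_frames_ctx or avctx->hw_device_ctx when one was supplied,
// otherwise try D3D11, then D3D9, then Vulkan on the default adapter.
// Also hooks the FFmpeg trace writer into the AMF logger.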
214 static int amf_init_context(AVCodecContext *avctx)
215 {
216  AmfContext *ctx = avctx->priv_data;
217  AMFContext1 *context1 = NULL;
218  AMF_RESULT res;
219  av_unused int ret;
220 
221  ctx->hwsurfaces_in_queue = 0;
222  ctx->hwsurfaces_in_queue_max = 16;
223 
224  // configure AMF logger
225  // the return values of these functions indicate the previous state and do not affect behaviour
226  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, ctx->log_to_dbg != 0 );
227  if (ctx->log_to_dbg)
228  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, AMF_TRACE_TRACE);
229  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_CONSOLE, 0);
230  ctx->trace->pVtbl->SetGlobalLevel(ctx->trace, AMF_TRACE_TRACE);
231 
232  // connect AMF logger to av_log
233  ctx->tracer.vtbl = &tracer_vtbl;
234  ctx->tracer.avctx = avctx;
235  ctx->trace->pVtbl->RegisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID,(AMFTraceWriter*)&ctx->tracer, 1);
236  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, FFMPEG_AMF_WRITER_ID, AMF_TRACE_TRACE);
237 
238  res = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
239  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext() failed with error %d\n", res);
240 
241  // If a device was passed to the encoder, try to initialise from that.
242  if (avctx->hw_frames_ctx) {
243  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
244 
245  if (amf_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
246  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
247  av_get_pix_fmt_name(frames_ctx->sw_format));
248  return AVERROR(EINVAL);
249  }
250 
251  switch (frames_ctx->device_ctx->type) {
252 #if CONFIG_D3D11VA
253  case AV_HWDEVICE_TYPE_D3D11VA:
254  ret = amf_init_from_d3d11_device(avctx, frames_ctx->device_ctx->hwctx);
255  if (ret < 0)
256  return ret;
257  break;
258 #endif
259 #if CONFIG_DXVA2
260  case AV_HWDEVICE_TYPE_DXVA2:
261  ret = amf_init_from_dxva2_device(avctx, frames_ctx->device_ctx->hwctx);
262  if (ret < 0)
263  return ret;
264  break;
265 #endif
266  default:
267  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s frames context is not supported.\n",
268  av_hwdevice_get_type_name(frames_ctx->device_ctx->type));
269  return AVERROR(ENOSYS);
270  }
271 
272  ctx->hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
273  if (!ctx->hw_frames_ctx)
274  return AVERROR(ENOMEM);
275 
276  if (frames_ctx->initial_pool_size > 0)
277  ctx->hwsurfaces_in_queue_max = frames_ctx->initial_pool_size - 1;
278 
279  } else if (avctx->hw_device_ctx) {
280  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
281 
282  switch (device_ctx->type) {
283 #if CONFIG_D3D11VA
284  case AV_HWDEVICE_TYPE_D3D11VA:
285  ret = amf_init_from_d3d11_device(avctx, device_ctx->hwctx);
286  if (ret < 0)
287  return ret;
288  break;
289 #endif
290 #if CONFIG_DXVA2
291  case AV_HWDEVICE_TYPE_DXVA2:
292  ret = amf_init_from_dxva2_device(avctx, device_ctx->hwctx);
293  if (ret < 0)
294  return ret;
295  break;
296 #endif
297  default:
298  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s device is not supported.\n",
299  av_hwdevice_get_type_name(device_ctx->type));
300  return AVERROR(ENOSYS);
301  }
302 
303  ctx->hw_device_ctx = av_buffer_ref(avctx->hw_device_ctx);
304  if (!ctx->hw_device_ctx)
305  return AVERROR(ENOMEM);
306 
307  } else {
308  res = ctx->context->pVtbl->InitDX11(ctx->context, NULL, AMF_DX11_1);
309  if (res == AMF_OK) {
310  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D11.\n");
311  } else {
312  res = ctx->context->pVtbl->InitDX9(ctx->context, NULL);
313  if (res == AMF_OK) {
314  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
315  } else {
316  AMFGuid guid = IID_AMFContext1();
317  res = ctx->context->pVtbl->QueryInterface(ctx->context, &guid, (void**)&context1);
318  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext1() failed with error %d\n", res);
319 
320  res = context1->pVtbl->InitVulkan(context1, NULL);
321  context1->pVtbl->Release(context1);
322  if (res != AMF_OK) {
323  if (res == AMF_NOT_SUPPORTED)
324  av_log(avctx, AV_LOG_ERROR, "AMF via Vulkan is not supported on the given device.\n");
325  else
326  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given Vulkan device: %d.\n", res);
327  return AVERROR(ENOSYS);
328  }
329  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via Vulkan.\n");
330  }
331  }
332  }
333  return 0;
334 }
335 
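// Pick the AMF encoder component matching avctx->codec->id (AVC or HEVC),
// validate the input pixel format and create the encoder instance.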
336 static int amf_init_encoder(AVCodecContext *avctx)
337 {
338  AmfContext *ctx = avctx->priv_data;
339  const wchar_t *codec_id = NULL;
340  AMF_RESULT res;
341  enum AVPixelFormat pix_fmt;
342 
343  switch (avctx->codec->id) {
344  case AV_CODEC_ID_H264:
345  codec_id = AMFVideoEncoderVCE_AVC;
346  break;
347  case AV_CODEC_ID_HEVC:
348  codec_id = AMFVideoEncoder_HEVC;
349  break;
350  default:
351  break;
352  }
353  AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
354 
355  if (ctx->hw_frames_ctx)
356  pix_fmt = ((AVHWFramesContext*)ctx->hw_frames_ctx->data)->sw_format;
357  else
358  pix_fmt = avctx->pix_fmt;
359 
360  ctx->format = amf_av_to_amf_format(pix_fmt);
361  AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
362  "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
363 
364  res = ctx->factory->pVtbl->CreateComponent(ctx->factory, ctx->context, codec_id, &ctx->encoder);
365  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
366 
367  return 0;
368 }
369 
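// Release the delayed surface, encoder and context, unregister the trace
// writer, unload the AMF library and free the delayed frame and timestamp FIFO.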
370 int av_cold ff_amf_encode_close(AVCodecContext *avctx)
371 {
372  AmfContext *ctx = avctx->priv_data;
373 
374  if (ctx->delayed_surface) {
375  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
376  ctx->delayed_surface = NULL;
377  }
378 
379  if (ctx->encoder) {
380  ctx->encoder->pVtbl->Terminate(ctx->encoder);
381  ctx->encoder->pVtbl->Release(ctx->encoder);
382  ctx->encoder = NULL;
383  }
384 
385  if (ctx->context) {
386  ctx->context->pVtbl->Terminate(ctx->context);
387  ctx->context->pVtbl->Release(ctx->context);
388  ctx->context = NULL;
389  }
390  av_buffer_unref(&ctx->hw_device_ctx);
391  av_buffer_unref(&ctx->hw_frames_ctx);
392 
393  if (ctx->trace) {
394  ctx->trace->pVtbl->UnregisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID);
395  }
396  if (ctx->library) {
397  dlclose(ctx->library);
398  ctx->library = NULL;
399  }
400  ctx->trace = NULL;
401  ctx->debug = NULL;
402  ctx->factory = NULL;
403  ctx->version = 0;
404  ctx->delayed_drain = 0;
405  av_frame_free(&ctx->delayed_frame);
406  av_fifo_freep(&ctx->timestamp_list);
407 
408  return 0;
409 }
410 
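// Copy a host (system-memory) frame into an AMF surface, plane by plane,
// using the pitches reported by the surface.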
411 static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
412  AMFSurface* surface)
413 {
414  AMFPlane *plane;
415  uint8_t *dst_data[4];
416  int dst_linesize[4];
417  int planes;
418  int i;
419 
420  planes = surface->pVtbl->GetPlanesCount(surface);
421  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
422 
423  for (i = 0; i < planes; i++) {
424  plane = surface->pVtbl->GetPlaneAt(surface, i);
425  dst_data[i] = plane->pVtbl->GetNative(plane);
426  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
427  }
428  av_image_copy(dst_data, dst_linesize,
429  (const uint8_t**)frame->data, frame->linesize, frame->format,
430  avctx->width, avctx->height);
431 
432  return 0;
433 }
434 
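// Store a submitted frame's pts in the timestamp FIFO, growing it on demand;
// the queue is read back in amf_copy_buffer() to derive packet dts.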
435 static inline int timestamp_queue_enqueue(AVCodecContext *avctx, int64_t timestamp)
436 {
437  AmfContext *ctx = avctx->priv_data;
438  if (av_fifo_space(ctx->timestamp_list) < sizeof(timestamp)) {
439  if (av_fifo_grow(ctx->timestamp_list, sizeof(timestamp)) < 0) {
440  return AVERROR(ENOMEM);
441  }
442  }
443  av_fifo_generic_write(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
444  return 0;
445 }
446 
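// Copy the encoded bitstream out of an AMFBuffer into the packet, flag IDR
// output as a keyframe, restore the original pts from PTS_PROP and compute
// dts from the timestamp FIFO (shifted by dts_delay when B-frames are used).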
447 static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
448 {
449  AmfContext *ctx = avctx->priv_data;
450  int ret;
451  AMFVariantStruct var = {0};
452  int64_t timestamp = AV_NOPTS_VALUE;
453  int64_t size = buffer->pVtbl->GetSize(buffer);
454 
455  if ((ret = ff_get_encode_buffer(avctx, pkt, size, 0)) < 0) {
456  return ret;
457  }
458  memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);
459 
460  switch (avctx->codec->id) {
461  case AV_CODEC_ID_H264:
462  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
463  if(var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
464  pkt->flags = AV_PKT_FLAG_KEY;
465  }
466  break;
467  case AV_CODEC_ID_HEVC:
468  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
469  if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
470  pkt->flags = AV_PKT_FLAG_KEY;
471  }
472  break;
473  default:
474  break;
475  }
476 
477  buffer->pVtbl->GetProperty(buffer, PTS_PROP, &var);
478 
479  pkt->pts = var.int64Value; // original pts
480 
481 
482  AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN, "timestamp_list is empty\n");
483 
484  av_fifo_generic_read(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
485 
486  // calc dts shift if max_b_frames > 0
487  if (avctx->max_b_frames > 0 && ctx->dts_delay == 0) {
488  int64_t timestamp_last = AV_NOPTS_VALUE;
489  AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN,
490  "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
491  av_fifo_generic_peek_at(
492  ctx->timestamp_list,
493  &timestamp_last,
494  (av_fifo_size(ctx->timestamp_list) / sizeof(timestamp) - 1) * sizeof(timestamp_last),
495  sizeof(timestamp_last),
496  NULL);
497  if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
498  return AVERROR(ERANGE);
499  }
500  ctx->dts_delay = timestamp_last - timestamp;
501  }
502  pkt->dts = timestamp - ctx->dts_delay;
503  return 0;
504 }
505 
506 // amfenc API implementation
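// Common init for all AMF encoders: load the runtime, create the context and
// the encoder component; on any failure tear everything down again.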
507 int ff_amf_encode_init(AVCodecContext *avctx)
508 {
509  int ret;
510 
511  if ((ret = amf_load_library(avctx)) == 0) {
512  if ((ret = amf_init_context(avctx)) == 0) {
513  if ((ret = amf_init_encoder(avctx)) == 0) {
514  return 0;
515  }
516  }
517  }
518  ff_amf_encode_close(avctx);
519  return ret;
520 }
521 
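// Attach an AMFBuffer to a surface as an interface-typed property so it can
// be retrieved later from the encoder output.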
522 static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
523 {
524  AMF_RESULT res;
525  AMFVariantStruct var;
526  res = AMFVariantInit(&var);
527  if (res == AMF_OK) {
528  AMFGuid guid_AMFInterface = IID_AMFInterface();
529  AMFInterface *amf_interface;
530  res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
531 
532  if (res == AMF_OK) {
533  res = AMFVariantAssignInterface(&var, amf_interface);
534  amf_interface->pVtbl->Release(amf_interface);
535  }
536  if (res == AMF_OK) {
537  res = object->pVtbl->SetProperty(object, name, var);
538  }
539  AMFVariantClear(&var);
540  }
541  return res;
542 }
543 
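// Read back an interface-typed property and query it for the AMFBuffer
// interface; fails with AMF_INVALID_DATA_TYPE for other variant types.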
544 static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
545 {
546  AMF_RESULT res;
547  AMFVariantStruct var;
548  res = AMFVariantInit(&var);
549  if (res == AMF_OK) {
550  res = object->pVtbl->GetProperty(object, name, &var);
551  if (res == AMF_OK) {
552  if (var.type == AMF_VARIANT_INTERFACE) {
553  AMFGuid guid_AMFBuffer = IID_AMFBuffer();
554  AMFInterface *amf_interface = AMFVariantInterface(&var);
555  res = amf_interface->pVtbl->QueryInterface(amf_interface, &guid_AMFBuffer, (void**)val);
556  } else {
557  res = AMF_INVALID_DATA_TYPE;
558  }
559  }
560  AMFVariantClear(&var);
561  }
562  return res;
563 }
564 
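// Keep a hardware frame alive while AMF still uses its surface: clone the
// AVFrame and stash the clone's pointer inside a small host AMFBuffer that
// travels with the surface as the "av_frame_ref" property.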
565 static AMFBuffer *amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
566 {
567  AVFrame *frame_ref;
568  AMFBuffer *frame_ref_storage_buffer = NULL;
569  AMF_RESULT res;
570 
571  res = context->pVtbl->AllocBuffer(context, AMF_MEMORY_HOST, sizeof(frame_ref), &frame_ref_storage_buffer);
572  if (res == AMF_OK) {
573  frame_ref = av_frame_clone(frame);
574  if (frame_ref) {
575  memcpy(frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), &frame_ref, sizeof(frame_ref));
576  } else {
577  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
578  frame_ref_storage_buffer = NULL;
579  }
580  }
581  return frame_ref_storage_buffer;
582 }
583 
584 static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
585 {
586  AVFrame *frame_ref;
587  memcpy(&frame_ref, frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), sizeof(frame_ref));
588  av_frame_free(&frame_ref);
589  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
590 }
591 
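// Encoder receive_packet callback: pull the next frame via ff_encode_get_frame(),
// wrap or copy it into an AMF surface, submit it (or Drain() at EOF), then poll
// QueryOutput() for encoded data, blocking only while the input queue is full
// or a drain is still pending.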
592 int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
593 {
594  AmfContext *ctx = avctx->priv_data;
595  AMFSurface *surface;
596  AMF_RESULT res;
597  int ret;
598  AMF_RESULT res_query;
599  AMFData *data = NULL;
600  AVFrame *frame = ctx->delayed_frame;
601  int block_and_wait;
602 
603  if (!ctx->encoder)
604  return AVERROR(EINVAL);
605 
606  if (!frame->buf[0]) {
607  ret = ff_encode_get_frame(avctx, frame);
608  if (ret < 0 && ret != AVERROR_EOF)
609  return ret;
610  }
611 
612  if (!frame->buf[0]) { // submit drain
613  if (!ctx->eof) { // submit drain one time only
614  if (ctx->delayed_surface != NULL) {
615  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
616  } else if(!ctx->delayed_drain) {
617  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
618  if (res == AMF_INPUT_FULL) {
619  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
620  } else {
621  if (res == AMF_OK) {
622  ctx->eof = 1; // drain started
623  }
624  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
625  }
626  }
627  }
628  } else if (!ctx->delayed_surface) { // submit frame
629  int hw_surface = 0;
630 
631  // prepare surface from frame
632  switch (frame->format) {
633 #if CONFIG_D3D11VA
634  case AV_PIX_FMT_D3D11:
635  {
636  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
637  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
638  int index = (intptr_t)frame->data[1]; // index of the slice in the texture array - tells AMF which slice to use
639 
640  av_assert0(frame->hw_frames_ctx && ctx->hw_frames_ctx &&
641  frame->hw_frames_ctx->data == ctx->hw_frames_ctx->data);
642 
643  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
644 
645  res = ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
646  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
647 
648  hw_surface = 1;
649  }
650  break;
651 #endif
652 #if CONFIG_DXVA2
653  case AV_PIX_FMT_DXVA2_VLD:
654  {
655  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
656 
657  res = ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
658  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
659 
660  hw_surface = 1;
661  }
662  break;
663 #endif
664  default:
665  {
666  res = ctx->context->pVtbl->AllocSurface(ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
667  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
668  amf_copy_surface(avctx, frame, surface);
669  }
670  break;
671  }
672 
673  if (hw_surface) {
674  AMFBuffer *frame_ref_storage_buffer;
675 
676  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
677  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
678 
679  frame_ref_storage_buffer = amf_create_buffer_with_frame_ref(frame, ctx->context);
680  AMF_RETURN_IF_FALSE(ctx, frame_ref_storage_buffer != NULL, AVERROR(ENOMEM), "create_buffer_with_frame_ref() returned NULL\n");
681 
682  res = amf_set_property_buffer(surface, L"av_frame_ref", frame_ref_storage_buffer);
683  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_ref\" with error %d\n", res);
684  ctx->hwsurfaces_in_queue++;
685  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
686  }
687 
688  surface->pVtbl->SetPts(surface, frame->pts);
689  AMF_ASSIGN_PROPERTY_INT64(res, surface, PTS_PROP, frame->pts);
690 
691  switch (avctx->codec->id) {
692  case AV_CODEC_ID_H264:
693  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
694  break;
695  case AV_CODEC_ID_HEVC:
696  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
697  break;
698  default:
699  break;
700  }
701 
702  // submit surface
703  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
704  if (res == AMF_INPUT_FULL) { // handle full queue
705  //store surface for later submission
706  ctx->delayed_surface = surface;
707  } else {
708  int64_t pts = frame->pts;
709  surface->pVtbl->Release(surface);
710  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
711 
712  av_frame_unref(frame);
713  if ((ret = timestamp_queue_enqueue(avctx, pts)) < 0) {
714  return ret;
715  }
716  }
717  }
718 
719 
720  do {
721  block_and_wait = 0;
722  // poll data
723  res_query = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
724  if (data) {
725  // copy data to packet
726  AMFBuffer* buffer;
727  AMFGuid guid = IID_AMFBuffer();
728  data->pVtbl->QueryInterface(data, &guid, (void**)&buffer); // query for buffer interface
729  ret = amf_copy_buffer(avctx, avpkt, buffer);
730 
731  buffer->pVtbl->Release(buffer);
732 
733  if (data->pVtbl->HasProperty(data, L"av_frame_ref")) {
734  AMFBuffer *frame_ref_storage_buffer;
735  res = amf_get_property_buffer(data, L"av_frame_ref", &frame_ref_storage_buffer);
736  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetProperty failed for \"av_frame_ref\" with error %d\n", res);
737  amf_release_buffer_with_frame_ref(frame_ref_storage_buffer);
738  ctx->hwsurfaces_in_queue--;
739  }
740 
741  data->pVtbl->Release(data);
742 
743  AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);
744 
745  if (ctx->delayed_surface != NULL) { // try to resubmit frame
746  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)ctx->delayed_surface);
747  if (res != AMF_INPUT_FULL) {
748  int64_t pts = ctx->delayed_surface->pVtbl->GetPts(ctx->delayed_surface);
749  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
750  ctx->delayed_surface = NULL;
751  av_frame_unref(ctx->delayed_frame);
752  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated SubmitInput() failed with error %d\n", res);
753 
754  if ((ret = timestamp_queue_enqueue(avctx, pts)) < 0) {
755  return ret;
756  }
757  } else {
758  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed frame submission got AMF_INPUT_FULL - should not happen\n");
759  }
760  } else if (ctx->delayed_drain) { // try to resubmit drain
761  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
762  if (res != AMF_INPUT_FULL) {
763  ctx->delayed_drain = 0;
764  ctx->eof = 1; // drain started
765  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
766  } else {
767  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL - should not happen\n");
768  }
769  }
770  } else if (ctx->delayed_surface != NULL || ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max)) {
771  block_and_wait = 1;
772  av_usleep(1000); // wait and poll again
773  }
774  } while (block_and_wait);
775 
776  if (res_query == AMF_EOF) {
777  ret = AVERROR_EOF;
778  } else if (data == NULL) {
779  ret = AVERROR(EAGAIN);
780  } else {
781  ret = 0;
782  }
783  return ret;
784 }
785 
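// Hardware configurations advertised by the AMF encoders: D3D11VA and DXVA2,
// either as frames contexts or as plain devices.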
786 const AVCodecHWConfigInternal *const ff_amfenc_hw_configs[] = {
787 #if CONFIG_D3D11VA
788  HW_CONFIG_ENCODER_FRAMES(D3D11, D3D11VA),
789  HW_CONFIG_ENCODER_DEVICE(NONE, D3D11VA),
790 #endif
791 #if CONFIG_DXVA2
792  HW_CONFIG_ENCODER_FRAMES(DXVA2_VLD, DXVA2),
793  HW_CONFIG_ENCODER_DEVICE(NONE, DXVA2),
794 #endif
795  NULL,
796 };