FFmpeg
hwcontext.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include "buffer.h"
22 #include "common.h"
23 #include "hwcontext.h"
24 #include "hwcontext_internal.h"
25 #include "imgutils.h"
26 #include "log.h"
27 #include "mem.h"
28 #include "pixdesc.h"
29 #include "pixfmt.h"
30 
31 static const HWContextType * const hw_table[] = {
32 #if CONFIG_CUDA
33     &ff_hwcontext_type_cuda,
34 #endif
35 #if CONFIG_D3D11VA
36     &ff_hwcontext_type_d3d11va,
37 #endif
38 #if CONFIG_LIBDRM
39     &ff_hwcontext_type_drm,
40 #endif
41 #if CONFIG_DXVA2
42     &ff_hwcontext_type_dxva2,
43 #endif
44 #if CONFIG_OPENCL
45     &ff_hwcontext_type_opencl,
46 #endif
47 #if CONFIG_QSV
48     &ff_hwcontext_type_qsv,
49 #endif
50 #if CONFIG_VAAPI
51     &ff_hwcontext_type_vaapi,
52 #endif
53 #if CONFIG_VDPAU
54     &ff_hwcontext_type_vdpau,
55 #endif
56 #if CONFIG_VIDEOTOOLBOX
57     &ff_hwcontext_type_videotoolbox,
58 #endif
59 #if CONFIG_MEDIACODEC
60     &ff_hwcontext_type_mediacodec,
61 #endif
62  NULL,
63 };
64 
65 static const char *const hw_type_names[] = {
66  [AV_HWDEVICE_TYPE_CUDA] = "cuda",
67  [AV_HWDEVICE_TYPE_DRM] = "drm",
68  [AV_HWDEVICE_TYPE_DXVA2] = "dxva2",
69  [AV_HWDEVICE_TYPE_D3D11VA] = "d3d11va",
70  [AV_HWDEVICE_TYPE_OPENCL] = "opencl",
71  [AV_HWDEVICE_TYPE_QSV] = "qsv",
72  [AV_HWDEVICE_TYPE_VAAPI] = "vaapi",
73  [AV_HWDEVICE_TYPE_VDPAU] = "vdpau",
74  [AV_HWDEVICE_TYPE_VIDEOTOOLBOX] = "videotoolbox",
75  [AV_HWDEVICE_TYPE_MEDIACODEC] = "mediacodec",
76 };
77 
78 enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
79 {
80  int type;
81  for (type = 0; type < FF_ARRAY_ELEMS(hw_type_names); type++) {
82  if (hw_type_names[type] && !strcmp(hw_type_names[type], name))
83  return type;
84  }
85  return AV_HWDEVICE_TYPE_NONE;
86 }
87 
88 const char *av_hwdevice_get_type_name(enum AVHWDeviceType type)
89 {
90     if (type > AV_HWDEVICE_TYPE_NONE &&
91         type < FF_ARRAY_ELEMS(hw_type_names))
92         return hw_type_names[type];
93     else
94         return NULL;
95 }
96 
97 enum AVHWDeviceType av_hwdevice_iterate_types(enum AVHWDeviceType prev)
98 {
99  enum AVHWDeviceType next;
100  int i, set = 0;
101  for (i = 0; hw_table[i]; i++) {
102  if (prev != AV_HWDEVICE_TYPE_NONE && hw_table[i]->type <= prev)
103  continue;
104  if (!set || hw_table[i]->type < next) {
105  next = hw_table[i]->type;
106  set = 1;
107  }
108  }
109  return set ? next : AV_HWDEVICE_TYPE_NONE;
110 }
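/* Usage sketch (illustrative, not part of this file): enumerating the device
 * types compiled into libavutil with the two lookup helpers above, assuming a
 * standard build:
 *
 *     enum AVHWDeviceType t = AV_HWDEVICE_TYPE_NONE;
 *     while ((t = av_hwdevice_iterate_types(t)) != AV_HWDEVICE_TYPE_NONE)
 *         printf("available hw device type: %s\n",
 *                av_hwdevice_get_type_name(t));
 */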
111 
112 static const AVClass hwdevice_ctx_class = {
113  .class_name = "AVHWDeviceContext",
114  .item_name = av_default_item_name,
115  .version = LIBAVUTIL_VERSION_INT,
116 };
117 
118 static void hwdevice_ctx_free(void *opaque, uint8_t *data)
119 {
120     AVHWDeviceContext *ctx = (AVHWDeviceContext*)data;
121 
122     /* uninit might still want to access the hw context and the user
123      * free() callback might destroy it, so uninit has to be called first */
124  if (ctx->internal->hw_type->device_uninit)
125  ctx->internal->hw_type->device_uninit(ctx);
126 
127  if (ctx->free)
128  ctx->free(ctx);
129 
130  av_buffer_unref(&ctx->internal->source_device);
131 
132  av_freep(&ctx->hwctx);
133  av_freep(&ctx->internal->priv);
134  av_freep(&ctx->internal);
135  av_freep(&ctx);
136 }
137 
138 AVBufferRef *av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
139 {
140     AVHWDeviceContext *ctx;
141     AVBufferRef *buf;
142  const HWContextType *hw_type = NULL;
143  int i;
144 
145  for (i = 0; hw_table[i]; i++) {
146  if (hw_table[i]->type == type) {
147  hw_type = hw_table[i];
148  break;
149  }
150  }
151  if (!hw_type)
152  return NULL;
153 
154  ctx = av_mallocz(sizeof(*ctx));
155  if (!ctx)
156  return NULL;
157 
158  ctx->internal = av_mallocz(sizeof(*ctx->internal));
159  if (!ctx->internal)
160  goto fail;
161 
162  if (hw_type->device_priv_size) {
163  ctx->internal->priv = av_mallocz(hw_type->device_priv_size);
164  if (!ctx->internal->priv)
165  goto fail;
166  }
167 
168  if (hw_type->device_hwctx_size) {
169  ctx->hwctx = av_mallocz(hw_type->device_hwctx_size);
170  if (!ctx->hwctx)
171  goto fail;
172  }
173 
174     buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
175                            hwdevice_ctx_free, NULL,
176                            AV_BUFFER_FLAG_READONLY);
177     if (!buf)
178         goto fail;
179 
180     ctx->type     = type;
181     ctx->av_class = &hwdevice_ctx_class;
182 
183  ctx->internal->hw_type = hw_type;
184 
185  return buf;
186 
187 fail:
188  if (ctx->internal)
189  av_freep(&ctx->internal->priv);
190  av_freep(&ctx->internal);
191  av_freep(&ctx->hwctx);
192  av_freep(&ctx);
193  return NULL;
194 }
195 
196 int av_hwdevice_ctx_init(AVBufferRef *ref)
197 {
198     AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
199     int ret;
200 
201  if (ctx->internal->hw_type->device_init) {
202  ret = ctx->internal->hw_type->device_init(ctx);
203  if (ret < 0)
204  goto fail;
205  }
206 
207  return 0;
208 fail:
209  if (ctx->internal->hw_type->device_uninit)
210  ctx->internal->hw_type->device_uninit(ctx);
211  return ret;
212 }
213 
214 static const AVClass hwframe_ctx_class = {
215  .class_name = "AVHWFramesContext",
216  .item_name = av_default_item_name,
217  .version = LIBAVUTIL_VERSION_INT,
218 };
219 
220 static void hwframe_ctx_free(void *opaque, uint8_t *data)
221 {
222     AVHWFramesContext *ctx = (AVHWFramesContext*)data;
223 
224  if (ctx->internal->pool_internal)
225  av_buffer_pool_uninit(&ctx->internal->pool_internal);
226 
227  if (ctx->internal->hw_type->frames_uninit)
228  ctx->internal->hw_type->frames_uninit(ctx);
229 
230  if (ctx->free)
231  ctx->free(ctx);
232 
233  av_buffer_unref(&ctx->internal->source_frames);
234 
235  av_buffer_unref(&ctx->device_ref);
236 
237  av_freep(&ctx->hwctx);
238  av_freep(&ctx->internal->priv);
239  av_freep(&ctx->internal);
240  av_freep(&ctx);
241 }
242 
243 AVBufferRef *av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
244 {
245     AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref_in->data;
246     const HWContextType  *hw_type = device_ctx->internal->hw_type;
247     AVHWFramesContext *ctx;
248     AVBufferRef *buf, *device_ref = NULL;
249 
250  ctx = av_mallocz(sizeof(*ctx));
251  if (!ctx)
252  return NULL;
253 
254  ctx->internal = av_mallocz(sizeof(*ctx->internal));
255  if (!ctx->internal)
256  goto fail;
257 
258  if (hw_type->frames_priv_size) {
259  ctx->internal->priv = av_mallocz(hw_type->frames_priv_size);
260  if (!ctx->internal->priv)
261  goto fail;
262  }
263 
264  if (hw_type->frames_hwctx_size) {
265  ctx->hwctx = av_mallocz(hw_type->frames_hwctx_size);
266  if (!ctx->hwctx)
267  goto fail;
268  }
269 
270  device_ref = av_buffer_ref(device_ref_in);
271  if (!device_ref)
272  goto fail;
273 
274     buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
275                            hwframe_ctx_free, NULL,
276                            AV_BUFFER_FLAG_READONLY);
277     if (!buf)
278         goto fail;
279 
280     ctx->av_class   = &hwframe_ctx_class;
281  ctx->device_ref = device_ref;
282  ctx->device_ctx = device_ctx;
283  ctx->format = AV_PIX_FMT_NONE;
284  ctx->sw_format = AV_PIX_FMT_NONE;
285 
286  ctx->internal->hw_type = hw_type;
287 
288  return buf;
289 
290 fail:
291  if (device_ref)
292  av_buffer_unref(&device_ref);
293  if (ctx->internal)
294  av_freep(&ctx->internal->priv);
295  av_freep(&ctx->internal);
296  av_freep(&ctx->hwctx);
297  av_freep(&ctx);
298  return NULL;
299 }
300 
301 static int hwframe_pool_prealloc(AVBufferRef *ref)
302 {
303     AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
304     AVFrame **frames;
305  int i, ret = 0;
306 
307  frames = av_mallocz_array(ctx->initial_pool_size, sizeof(*frames));
308  if (!frames)
309  return AVERROR(ENOMEM);
310 
311  for (i = 0; i < ctx->initial_pool_size; i++) {
312  frames[i] = av_frame_alloc();
313  if (!frames[i])
314  goto fail;
315 
316         ret = av_hwframe_get_buffer(ref, frames[i], 0);
317         if (ret < 0)
318  goto fail;
319  }
320 
321 fail:
322  for (i = 0; i < ctx->initial_pool_size; i++)
323         av_frame_free(&frames[i]);
324     av_freep(&frames);
325 
326  return ret;
327 }
328 
329 int av_hwframe_ctx_init(AVBufferRef *ref)
330 {
331     AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
332     const enum AVPixelFormat *pix_fmt;
333  int ret;
334 
335  if (ctx->internal->source_frames) {
336  /* A derived frame context is already initialised. */
337  return 0;
338  }
339 
340  /* validate the pixel format */
341  for (pix_fmt = ctx->internal->hw_type->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++) {
342  if (*pix_fmt == ctx->format)
343  break;
344  }
345  if (*pix_fmt == AV_PIX_FMT_NONE) {
347  "The hardware pixel format '%s' is not supported by the device type '%s'\n",
348  av_get_pix_fmt_name(ctx->format), ctx->internal->hw_type->name);
349  return AVERROR(ENOSYS);
350  }
351 
352  /* validate the dimensions */
353  ret = av_image_check_size(ctx->width, ctx->height, 0, ctx);
354  if (ret < 0)
355  return ret;
356 
357  /* format-specific init */
358  if (ctx->internal->hw_type->frames_init) {
359  ret = ctx->internal->hw_type->frames_init(ctx);
360  if (ret < 0)
361  goto fail;
362  }
363 
364  if (ctx->internal->pool_internal && !ctx->pool)
365  ctx->pool = ctx->internal->pool_internal;
366 
367  /* preallocate the frames in the pool, if requested */
368  if (ctx->initial_pool_size > 0) {
369         ret = hwframe_pool_prealloc(ref);
370         if (ret < 0)
371  goto fail;
372  }
373 
374  return 0;
375 fail:
376  if (ctx->internal->hw_type->frames_uninit)
377  ctx->internal->hw_type->frames_uninit(ctx);
378  return ret;
379 }
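/* Usage sketch (illustrative, not part of this file): the intended setup flow
 * for a frames context. "device_ref" is assumed to be an initialised device
 * reference, and the pixel formats and dimensions are example values only;
 * ctx->format must be one of the hw type's supported formats or
 * av_hwframe_ctx_init() fails with ENOSYS as above:
 *
 *     AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
 *     AVHWFramesContext *fctx;
 *     int ret;
 *     if (!frames_ref)
 *         return AVERROR(ENOMEM);
 *     fctx            = (AVHWFramesContext*)frames_ref->data;
 *     fctx->format    = AV_PIX_FMT_VAAPI;
 *     fctx->sw_format = AV_PIX_FMT_NV12;
 *     fctx->width     = 1920;
 *     fctx->height    = 1080;
 *     fctx->initial_pool_size = 8;
 *     ret = av_hwframe_ctx_init(frames_ref);
 *     if (ret < 0)
 *         av_buffer_unref(&frames_ref);
 */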
380 
381 int av_hwframe_transfer_get_formats(AVBufferRef *hwframe_ref,
382                                     enum AVHWFrameTransferDirection dir,
383                                     enum AVPixelFormat **formats, int flags)
384 {
385  AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
386 
387  if (!ctx->internal->hw_type->transfer_get_formats)
388  return AVERROR(ENOSYS);
389 
390  return ctx->internal->hw_type->transfer_get_formats(ctx, dir, formats);
391 }
392 
393 static int transfer_data_alloc(AVFrame *dst, const AVFrame *src, int flags)
394 {
395  AVHWFramesContext *ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;
396  AVFrame *frame_tmp;
397  int ret = 0;
398 
399  frame_tmp = av_frame_alloc();
400  if (!frame_tmp)
401  return AVERROR(ENOMEM);
402 
403  /* if the format is set, use that
404  * otherwise pick the first supported one */
405  if (dst->format >= 0) {
406  frame_tmp->format = dst->format;
407  } else {
408  enum AVPixelFormat *formats;
409 
410  ret = av_hwframe_transfer_get_formats(src->hw_frames_ctx,
411                                           AV_HWFRAME_TRANSFER_DIRECTION_FROM,
412                                           &formats, 0);
413  if (ret < 0)
414  goto fail;
415  frame_tmp->format = formats[0];
416  av_freep(&formats);
417  }
418  frame_tmp->width = ctx->width;
419  frame_tmp->height = ctx->height;
420 
421  ret = av_frame_get_buffer(frame_tmp, 32);
422  if (ret < 0)
423  goto fail;
424 
425  ret = av_hwframe_transfer_data(frame_tmp, src, flags);
426  if (ret < 0)
427  goto fail;
428 
429  frame_tmp->width = src->width;
430  frame_tmp->height = src->height;
431 
432  av_frame_move_ref(dst, frame_tmp);
433 
434 fail:
435  av_frame_free(&frame_tmp);
436  return ret;
437 }
438 
439 int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
440 {
441     AVHWFramesContext *ctx;
442     int ret;
443 
444  if (!dst->buf[0])
445  return transfer_data_alloc(dst, src, flags);
446 
447  if (src->hw_frames_ctx) {
448  ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;
449 
450  ret = ctx->internal->hw_type->transfer_data_from(ctx, dst, src);
451  if (ret < 0)
452  return ret;
453  } else if (dst->hw_frames_ctx) {
454         ctx = (AVHWFramesContext*)dst->hw_frames_ctx->data;
455 
456  ret = ctx->internal->hw_type->transfer_data_to(ctx, dst, src);
457  if (ret < 0)
458  return ret;
459  } else
460  return AVERROR(ENOSYS);
461 
462  return 0;
463 }
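/* Usage sketch (illustrative, not part of this file): the common "download"
 * direction of av_hwframe_transfer_data(). "hw_frame" is assumed to carry a
 * valid hw_frames_ctx; leaving dst->format unset (-1) makes
 * transfer_data_alloc() above pick the first format reported by
 * av_hwframe_transfer_get_formats():
 *
 *     AVFrame *sw_frame = av_frame_alloc();
 *     int ret;
 *     if (!sw_frame)
 *         return AVERROR(ENOMEM);
 *     ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0);
 *     if (ret < 0)
 *         av_frame_free(&sw_frame);
 */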
464 
465 int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
466 {
467  AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
468  int ret;
469 
470  if (ctx->internal->source_frames) {
471  // This is a derived frame context, so we allocate in the source
472  // and map the frame immediately.
473  AVFrame *src_frame;
474 
475  frame->format = ctx->format;
476  frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
477  if (!frame->hw_frames_ctx)
478  return AVERROR(ENOMEM);
479 
480  src_frame = av_frame_alloc();
481  if (!src_frame)
482  return AVERROR(ENOMEM);
483 
484  ret = av_hwframe_get_buffer(ctx->internal->source_frames,
485  src_frame, 0);
486  if (ret < 0) {
487  av_frame_free(&src_frame);
488  return ret;
489  }
490 
491  ret = av_hwframe_map(frame, src_frame,
492  ctx->internal->source_allocation_map_flags);
493  if (ret) {
494  av_log(ctx, AV_LOG_ERROR, "Failed to map frame into derived "
495  "frame context: %d.\n", ret);
496  av_frame_free(&src_frame);
497  return ret;
498  }
499 
500  // Free the source frame immediately - the mapped frame still
501  // contains a reference to it.
502  av_frame_free(&src_frame);
503 
504  return 0;
505  }
506 
507  if (!ctx->internal->hw_type->frames_get_buffer)
508  return AVERROR(ENOSYS);
509 
510  if (!ctx->pool)
511  return AVERROR(EINVAL);
512 
513  frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
514  if (!frame->hw_frames_ctx)
515  return AVERROR(ENOMEM);
516 
517  ret = ctx->internal->hw_type->frames_get_buffer(ctx, frame);
518  if (ret < 0) {
519  av_buffer_unref(&frame->hw_frames_ctx);
520  return ret;
521  }
522 
523  return 0;
524 }
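/* Usage sketch (illustrative, not part of this file): allocating a surface
 * from an initialised frames context, assuming "frames_ref" was set up as in
 * the earlier sketch:
 *
 *     AVFrame *frame = av_frame_alloc();
 *     int ret;
 *     if (!frame)
 *         return AVERROR(ENOMEM);
 *     ret = av_hwframe_get_buffer(frames_ref, frame, 0);
 *     if (ret < 0)
 *         av_frame_free(&frame);
 */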
525 
526 void *av_hwdevice_hwconfig_alloc(AVBufferRef *ref)
527 {
528     AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
529     const HWContextType  *hw_type = ctx->internal->hw_type;
530 
531  if (hw_type->device_hwconfig_size == 0)
532  return NULL;
533 
534  return av_mallocz(hw_type->device_hwconfig_size);
535 }
536 
537 AVHWFramesConstraints *av_hwdevice_get_hwframe_constraints(AVBufferRef *ref,
538                                                            const void *hwconfig)
539 {
540     AVHWDeviceContext *ctx = (AVHWDeviceContext*)ref->data;
541     const HWContextType  *hw_type = ctx->internal->hw_type;
542  AVHWFramesConstraints *constraints;
543 
544  if (!hw_type->frames_get_constraints)
545  return NULL;
546 
547  constraints = av_mallocz(sizeof(*constraints));
548  if (!constraints)
549  return NULL;
550 
551  constraints->min_width = constraints->min_height = 0;
552  constraints->max_width = constraints->max_height = INT_MAX;
553 
554  if (hw_type->frames_get_constraints(ctx, hwconfig, constraints) >= 0) {
555  return constraints;
556  } else {
557  av_hwframe_constraints_free(&constraints);
558  return NULL;
559  }
560 }
561 
562 void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
563 {
564  if (*constraints) {
565  av_freep(&(*constraints)->valid_hw_formats);
566  av_freep(&(*constraints)->valid_sw_formats);
567  }
568  av_freep(constraints);
569 }
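/* Usage sketch (illustrative, not part of this file): querying and releasing
 * frame constraints for a device, assuming "device_ref" is a valid device
 * reference; passing NULL as hwconfig requests the generic constraints:
 *
 *     AVHWFramesConstraints *cst =
 *         av_hwdevice_get_hwframe_constraints(device_ref, NULL);
 *     if (cst) {
 *         // cst->valid_sw_formats is an AV_PIX_FMT_NONE-terminated list
 *         av_hwframe_constraints_free(&cst);
 *     }
 */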
570 
571 int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type,
572                            const char *device, AVDictionary *opts, int flags)
573 {
574  AVBufferRef *device_ref = NULL;
575  AVHWDeviceContext *device_ctx;
576  int ret = 0;
577 
578  device_ref = av_hwdevice_ctx_alloc(type);
579  if (!device_ref) {
580  ret = AVERROR(ENOMEM);
581  goto fail;
582  }
583  device_ctx = (AVHWDeviceContext*)device_ref->data;
584 
585  if (!device_ctx->internal->hw_type->device_create) {
586  ret = AVERROR(ENOSYS);
587  goto fail;
588  }
589 
590  ret = device_ctx->internal->hw_type->device_create(device_ctx, device,
591  opts, flags);
592  if (ret < 0)
593  goto fail;
594 
595  ret = av_hwdevice_ctx_init(device_ref);
596  if (ret < 0)
597  goto fail;
598 
599  *pdevice_ref = device_ref;
600  return 0;
601 fail:
602  av_buffer_unref(&device_ref);
603  *pdevice_ref = NULL;
604  return ret;
605 }
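/* Usage sketch (illustrative, not part of this file): opening a device in one
 * step with av_hwdevice_ctx_create(). The VAAPI type and the render-node path
 * are example values; the device string may also be NULL to let the backend
 * pick a default:
 *
 *     AVBufferRef *dev = NULL;
 *     int ret = av_hwdevice_ctx_create(&dev, AV_HWDEVICE_TYPE_VAAPI,
 *                                      "/dev/dri/renderD128", NULL, 0);
 *     if (ret < 0)
 *         return ret;
 *     // ... use the device ...
 *     av_buffer_unref(&dev);
 */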
606 
607 int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr,
608                                    enum AVHWDeviceType type,
609  AVBufferRef *src_ref, int flags)
610 {
611  AVBufferRef *dst_ref = NULL, *tmp_ref;
612  AVHWDeviceContext *dst_ctx, *tmp_ctx;
613  int ret = 0;
614 
615  tmp_ref = src_ref;
616  while (tmp_ref) {
617  tmp_ctx = (AVHWDeviceContext*)tmp_ref->data;
618  if (tmp_ctx->type == type) {
619  dst_ref = av_buffer_ref(tmp_ref);
620  if (!dst_ref) {
621  ret = AVERROR(ENOMEM);
622  goto fail;
623  }
624  goto done;
625  }
626  tmp_ref = tmp_ctx->internal->source_device;
627  }
628 
629  dst_ref = av_hwdevice_ctx_alloc(type);
630  if (!dst_ref) {
631  ret = AVERROR(ENOMEM);
632  goto fail;
633  }
634  dst_ctx = (AVHWDeviceContext*)dst_ref->data;
635 
636  tmp_ref = src_ref;
637  while (tmp_ref) {
638  tmp_ctx = (AVHWDeviceContext*)tmp_ref->data;
639  if (dst_ctx->internal->hw_type->device_derive) {
640  ret = dst_ctx->internal->hw_type->device_derive(dst_ctx,
641  tmp_ctx,
642  flags);
643  if (ret == 0) {
644  dst_ctx->internal->source_device = av_buffer_ref(src_ref);
645  if (!dst_ctx->internal->source_device) {
646  ret = AVERROR(ENOMEM);
647  goto fail;
648  }
649  ret = av_hwdevice_ctx_init(dst_ref);
650  if (ret < 0)
651  goto fail;
652  goto done;
653  }
654  if (ret != AVERROR(ENOSYS))
655  goto fail;
656  }
657  tmp_ref = tmp_ctx->internal->source_device;
658  }
659 
660  ret = AVERROR(ENOSYS);
661  goto fail;
662 
663 done:
664  *dst_ref_ptr = dst_ref;
665  return 0;
666 
667 fail:
668  av_buffer_unref(&dst_ref);
669  *dst_ref_ptr = NULL;
670  return ret;
671 }
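/* Usage sketch (illustrative, not part of this file): deriving one device
 * from another, e.g. a VAAPI device sharing state with an existing DRM device
 * "drm_dev". Whether a given pair of types can be derived depends on the
 * build and the hardware (ENOSYS is returned otherwise):
 *
 *     AVBufferRef *vaapi_dev = NULL;
 *     int ret = av_hwdevice_ctx_create_derived(&vaapi_dev,
 *                                              AV_HWDEVICE_TYPE_VAAPI,
 *                                              drm_dev, 0);
 *     if (ret < 0)
 *         return ret;
 */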
672 
673 static void ff_hwframe_unmap(void *opaque, uint8_t *data)
674 {
675     HWMapDescriptor *hwmap = (HWMapDescriptor*)data;
676     AVHWFramesContext *ctx = opaque;
677 
678  if (hwmap->unmap)
679  hwmap->unmap(ctx, hwmap);
680 
681  av_frame_free(&hwmap->source);
682 
683     av_buffer_unref(&hwmap->hw_frames_ctx);
684 
685  av_free(hwmap);
686 }
687 
688 int ff_hwframe_map_create(AVBufferRef *hwframe_ref,
689                           AVFrame *dst, const AVFrame *src,
690  void (*unmap)(AVHWFramesContext *ctx,
691  HWMapDescriptor *hwmap),
692  void *priv)
693 {
694  AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
695  HWMapDescriptor *hwmap;
696  int ret;
697 
698  hwmap = av_mallocz(sizeof(*hwmap));
699  if (!hwmap) {
700  ret = AVERROR(ENOMEM);
701  goto fail;
702  }
703 
704  hwmap->source = av_frame_alloc();
705  if (!hwmap->source) {
706  ret = AVERROR(ENOMEM);
707  goto fail;
708  }
709  ret = av_frame_ref(hwmap->source, src);
710  if (ret < 0)
711  goto fail;
712 
713  hwmap->hw_frames_ctx = av_buffer_ref(hwframe_ref);
714  if (!hwmap->hw_frames_ctx) {
715  ret = AVERROR(ENOMEM);
716  goto fail;
717  }
718 
719  hwmap->unmap = unmap;
720  hwmap->priv = priv;
721 
722  dst->buf[0] = av_buffer_create((uint8_t*)hwmap, sizeof(*hwmap),
723  &ff_hwframe_unmap, ctx, 0);
724  if (!dst->buf[0]) {
725  ret = AVERROR(ENOMEM);
726  goto fail;
727  }
728 
729  return 0;
730 
731 fail:
732  if (hwmap) {
733  av_buffer_unref(&hwmap->hw_frames_ctx);
734  av_frame_free(&hwmap->source);
735  }
736  av_free(hwmap);
737  return ret;
738 }
739 
740 int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
741 {
742  AVHWFramesContext *src_frames, *dst_frames;
743  HWMapDescriptor *hwmap;
744  int ret;
745 
746  if (src->hw_frames_ctx && dst->hw_frames_ctx) {
747  src_frames = (AVHWFramesContext*)src->hw_frames_ctx->data;
748  dst_frames = (AVHWFramesContext*)dst->hw_frames_ctx->data;
749 
750  if ((src_frames == dst_frames &&
751  src->format == dst_frames->sw_format &&
752  dst->format == dst_frames->format) ||
753  (src_frames->internal->source_frames &&
754  src_frames->internal->source_frames->data ==
755  (uint8_t*)dst_frames)) {
756  // This is an unmap operation. We don't need to directly
757  // do anything here other than fill in the original frame,
758  // because the real unmap will be invoked when the last
759  // reference to the mapped frame disappears.
760  if (!src->buf[0]) {
761  av_log(src_frames, AV_LOG_ERROR, "Invalid mapping "
762  "found when attempting unmap.\n");
763  return AVERROR(EINVAL);
764  }
765  hwmap = (HWMapDescriptor*)src->buf[0]->data;
766  av_frame_unref(dst);
767  return av_frame_ref(dst, hwmap->source);
768  }
769  }
770 
771  if (src->hw_frames_ctx) {
772  src_frames = (AVHWFramesContext*)src->hw_frames_ctx->data;
773 
774  if (src_frames->format == src->format &&
775  src_frames->internal->hw_type->map_from) {
776  ret = src_frames->internal->hw_type->map_from(src_frames,
777  dst, src, flags);
778  if (ret != AVERROR(ENOSYS))
779  return ret;
780  }
781  }
782 
783  if (dst->hw_frames_ctx) {
784  dst_frames = (AVHWFramesContext*)dst->hw_frames_ctx->data;
785 
786  if (dst_frames->format == dst->format &&
787  dst_frames->internal->hw_type->map_to) {
788  ret = dst_frames->internal->hw_type->map_to(dst_frames,
789  dst, src, flags);
790  if (ret != AVERROR(ENOSYS))
791  return ret;
792  }
793  }
794 
795  return AVERROR(ENOSYS);
796 }
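/* Usage sketch (illustrative, not part of this file): mapping a hardware
 * frame into CPU-visible memory. Support and the accepted dst->format values
 * depend on the backend's map_from implementation; NV12 is an example only:
 *
 *     AVFrame *map = av_frame_alloc();
 *     int ret;
 *     if (!map)
 *         return AVERROR(ENOMEM);
 *     map->format = AV_PIX_FMT_NV12;
 *     ret = av_hwframe_map(map, hw_frame, AV_HWFRAME_MAP_READ);
 *     if (ret < 0)
 *         av_frame_free(&map);
 *     // unreferencing "map" later triggers the real unmap via ff_hwframe_unmap()
 */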
797 
798 int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx,
799                                   enum AVPixelFormat format,
800  AVBufferRef *derived_device_ctx,
801  AVBufferRef *source_frame_ctx,
802  int flags)
803 {
804  AVBufferRef *dst_ref = NULL;
805  AVHWFramesContext *dst = NULL;
806  AVHWFramesContext *src = (AVHWFramesContext*)source_frame_ctx->data;
807  int ret;
808 
809  if (src->internal->source_frames) {
810  AVHWFramesContext *src_src =
811  (AVHWFramesContext*)src->internal->source_frames->data;
812  AVHWDeviceContext *dst_dev =
813  (AVHWDeviceContext*)derived_device_ctx->data;
814 
815  if (src_src->device_ctx == dst_dev) {
816  // This is actually an unmapping, so we just return a
817  // reference to the source frame context.
818  *derived_frame_ctx =
819  av_buffer_ref(src->internal->source_frames);
820  if (!*derived_frame_ctx) {
821  ret = AVERROR(ENOMEM);
822  goto fail;
823  }
824  return 0;
825  }
826  }
827 
828  dst_ref = av_hwframe_ctx_alloc(derived_device_ctx);
829  if (!dst_ref) {
830  ret = AVERROR(ENOMEM);
831  goto fail;
832  }
833 
834  dst = (AVHWFramesContext*)dst_ref->data;
835 
836  dst->format = format;
837  dst->sw_format = src->sw_format;
838  dst->width = src->width;
839  dst->height = src->height;
840 
841  dst->internal->source_frames = av_buffer_ref(source_frame_ctx);
842  if (!dst->internal->source_frames) {
843  ret = AVERROR(ENOMEM);
844  goto fail;
845  }
846 
847     dst->internal->source_allocation_map_flags =
848         flags & (AV_HWFRAME_MAP_READ      |
849                  AV_HWFRAME_MAP_WRITE     |
850                  AV_HWFRAME_MAP_OVERWRITE |
851                  AV_HWFRAME_MAP_DIRECT);
852 
853  ret = AVERROR(ENOSYS);
854  if (src->internal->hw_type->frames_derive_from)
855  ret = src->internal->hw_type->frames_derive_from(dst, src, flags);
856  if (ret == AVERROR(ENOSYS) &&
857         dst->internal->hw_type->frames_derive_to)
858         ret = dst->internal->hw_type->frames_derive_to(dst, src, flags);
859  if (ret == AVERROR(ENOSYS))
860  ret = 0;
861  if (ret)
862  goto fail;
863 
864  *derived_frame_ctx = dst_ref;
865  return 0;
866 
867 fail:
868  if (dst)
869         av_buffer_unref(&dst->internal->source_frames);
870     av_buffer_unref(&dst_ref);
871  return ret;
872 }
873 
874 int ff_hwframe_map_replace(AVFrame *dst, const AVFrame *src)
875 {
876  HWMapDescriptor *hwmap = (HWMapDescriptor*)dst->buf[0]->data;
877  av_frame_unref(hwmap->source);
878  return av_frame_ref(hwmap->source, src);
879 }