/* FFmpeg — libavutil/hwcontext_vulkan.c */
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#define VK_NO_PROTOTYPES
#define VK_ENABLE_BETA_EXTENSIONS

#ifdef _WIN32
#include <windows.h> /* Included to prevent conflicts with CreateSemaphore */
#include <versionhelpers.h>
#include "compat/w32dlfcn.h"
#else
#include <dlfcn.h>
#endif

#include <unistd.h>

#include "config.h"
#include "pixdesc.h"
#include "avstring.h"
#include "imgutils.h"
#include "hwcontext.h"
#include "avassert.h"
#include "hwcontext_internal.h"
#include "hwcontext_vulkan.h"

#include "vulkan.h"
#include "vulkan_loader.h"

#if CONFIG_LIBDRM
#include <xf86drm.h>
#include <drm_fourcc.h>
#include "hwcontext_drm.h"
#if CONFIG_VAAPI
#include <va/va_drmcommon.h>
#include "hwcontext_vaapi.h"
#endif
#endif

#if CONFIG_CUDA
#include "hwcontext_cuda_internal.h"
#include "cuda_check.h"
#define CHECK_CU(x) FF_CUDA_CHECK_DL(cuda_cu, cu, x)
#endif
59 
60 typedef struct VulkanQueueCtx {
61  VkFence fence;
62  VkQueue queue;
64 
65  /* Buffer dependencies */
70 
71 typedef struct VulkanExecCtx {
72  VkCommandPool pool;
73  VkCommandBuffer *bufs;
75  int nb_queues;
78 
79 typedef struct VulkanDevicePriv {
80  /* Vulkan library and loader functions */
81  void *libvulkan;
83 
84  /* Properties */
85  VkPhysicalDeviceProperties2 props;
86  VkPhysicalDeviceMemoryProperties mprops;
87  VkPhysicalDeviceExternalMemoryHostPropertiesEXT hprops;
88 
89  /* Features */
90  VkPhysicalDeviceVulkan11Features device_features_1_1;
91  VkPhysicalDeviceVulkan12Features device_features_1_2;
92 
93  /* Queues */
94  uint32_t qfs[5];
95  int num_qfs;
96 
97  /* Debug callback */
98  VkDebugUtilsMessengerEXT debug_ctx;
99 
100  /* Extensions */
102 
103  /* Settings */
105 
106  /* Option to allocate all image planes in a single allocation */
108 
109  /* Nvidia */
111 
112  /* Intel */
115 
116 typedef struct VulkanFramesPriv {
117  /* Image conversions */
119 
120  /* Image transfers */
123 
124  /* Modifier info list to free at uninit */
125  VkImageDrmFormatModifierListCreateInfoEXT *modifier_info;
127 
/* Per-frame private data; only used for CUDA interop state. */
typedef struct AVVkFrameInternal {
#if CONFIG_CUDA
    /* Importing external memory into cuda is really expensive so we keep the
     * memory imported all the time */
    AVBufferRef *cuda_fc_ref; /* Need to keep it around for uninit */
    CUexternalMemory ext_mem[AV_NUM_DATA_POINTERS];
    CUmipmappedArray cu_mma[AV_NUM_DATA_POINTERS];
    CUarray cu_array[AV_NUM_DATA_POINTERS];
    CUexternalSemaphore cu_sem[AV_NUM_DATA_POINTERS];
#ifdef _WIN32
    HANDLE ext_mem_handle[AV_NUM_DATA_POINTERS];
    HANDLE ext_sem_handle[AV_NUM_DATA_POINTERS];
#endif
#endif
} AVVkFrameInternal;
/* Append a strdup'd copy of `val` to the growable string array `list`
 * (length `count`). On allocation failure sets `err` and jumps to the
 * caller's `fail:` label.
 * Fix: the original assigned av_realloc_array() straight back into `list`,
 * so on failure the old pointer was lost and RELEASE_PROPS in the fail path
 * saw NULL — leaking every previously-added string. Keep the old array in
 * a temporary so the caller can still free its contents. */
#define ADD_VAL_TO_LIST(list, count, val)                                      \
    do {                                                                       \
        void *tmp = av_realloc_array(list, sizeof(*list), count + 1);          \
        if (!tmp) {                                                            \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
        list = tmp;                                                            \
        list[count] = av_strdup(val);                                          \
        if (!list[count]) {                                                    \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
        count++;                                                               \
    } while (0)
157 
/* Free `count` strings owned by `props` and then the array itself.
 * Safe on NULL. Wrapped in do/while(0) so the macro behaves as a single
 * statement (the original bare `if` was a dangling-else hazard). */
#define RELEASE_PROPS(props, count)                                            \
    do {                                                                       \
        if (props) {                                                           \
            for (int i = 0; i < count; i++)                                    \
                av_free((void *)((props)[i]));                                 \
            av_free((void *)props);                                            \
        }                                                                      \
    } while (0)
164 
165 static const struct {
167  const VkFormat vkfmts[4];
168 } vk_pixfmt_map[] = {
169  { AV_PIX_FMT_GRAY8, { VK_FORMAT_R8_UNORM } },
170  { AV_PIX_FMT_GRAY16, { VK_FORMAT_R16_UNORM } },
171  { AV_PIX_FMT_GRAYF32, { VK_FORMAT_R32_SFLOAT } },
172 
173  { AV_PIX_FMT_NV12, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },
174  { AV_PIX_FMT_NV21, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },
175  { AV_PIX_FMT_P010, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
176  { AV_PIX_FMT_P012, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
177  { AV_PIX_FMT_P016, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
178 
179  { AV_PIX_FMT_NV16, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },
180 
181  { AV_PIX_FMT_NV24, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },
182  { AV_PIX_FMT_NV42, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },
183 
184  { AV_PIX_FMT_YUV420P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
185  { AV_PIX_FMT_YUV420P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
186  { AV_PIX_FMT_YUV420P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
187  { AV_PIX_FMT_YUV420P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
188 
189  { AV_PIX_FMT_YUV422P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
190  { AV_PIX_FMT_YUV422P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
191  { AV_PIX_FMT_YUV422P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
192  { AV_PIX_FMT_YUV422P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
193 
194  { AV_PIX_FMT_YUV444P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
195  { AV_PIX_FMT_YUV444P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
196  { AV_PIX_FMT_YUV444P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
197  { AV_PIX_FMT_YUV444P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
198 
199  { AV_PIX_FMT_YUVA420P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
200  { AV_PIX_FMT_YUVA420P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
201  /* There is no AV_PIX_FMT_YUVA420P12 */
202  { AV_PIX_FMT_YUVA420P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
203 
204  { AV_PIX_FMT_YUVA422P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
205  { AV_PIX_FMT_YUVA422P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
206  { AV_PIX_FMT_YUVA422P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
207  { AV_PIX_FMT_YUVA422P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
208 
209  { AV_PIX_FMT_YUVA444P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
210  { AV_PIX_FMT_YUVA444P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
211  { AV_PIX_FMT_YUVA444P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
212  { AV_PIX_FMT_YUVA444P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
213 
214  { AV_PIX_FMT_VUYX, { VK_FORMAT_R8G8B8A8_UNORM } },
215  { AV_PIX_FMT_XV36, { VK_FORMAT_R16G16B16A16_UNORM } },
216 
217  { AV_PIX_FMT_BGRA, { VK_FORMAT_B8G8R8A8_UNORM } },
218  { AV_PIX_FMT_RGBA, { VK_FORMAT_R8G8B8A8_UNORM } },
219  { AV_PIX_FMT_RGB24, { VK_FORMAT_R8G8B8_UNORM } },
220  { AV_PIX_FMT_BGR24, { VK_FORMAT_B8G8R8_UNORM } },
221  { AV_PIX_FMT_RGB48, { VK_FORMAT_R16G16B16_UNORM } },
222  { AV_PIX_FMT_RGBA64, { VK_FORMAT_R16G16B16A16_UNORM } },
223  { AV_PIX_FMT_RGBA64, { VK_FORMAT_R16G16B16A16_UNORM } },
224  { AV_PIX_FMT_RGB565, { VK_FORMAT_R5G6B5_UNORM_PACK16 } },
225  { AV_PIX_FMT_BGR565, { VK_FORMAT_B5G6R5_UNORM_PACK16 } },
226  { AV_PIX_FMT_BGR0, { VK_FORMAT_B8G8R8A8_UNORM } },
227  { AV_PIX_FMT_RGB0, { VK_FORMAT_R8G8B8A8_UNORM } },
228 
229  /* Lower priority as there's an endianess-dependent overlap between these
230  * and rgba/bgr0, and PACK32 formats are more limited */
231  { AV_PIX_FMT_BGR32, { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },
232  { AV_PIX_FMT_0BGR32, { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },
233 
234  { AV_PIX_FMT_X2RGB10, { VK_FORMAT_A2R10G10B10_UNORM_PACK32 } },
235 
236  { AV_PIX_FMT_GBRAP, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
237  { AV_PIX_FMT_GBRAP16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
238  { AV_PIX_FMT_GBRPF32, { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
239  { AV_PIX_FMT_GBRAPF32, { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
240 };
241 
243 {
244  for (enum AVPixelFormat i = 0; i < FF_ARRAY_ELEMS(vk_pixfmt_map); i++)
245  if (vk_pixfmt_map[i].pixfmt == p)
246  return vk_pixfmt_map[i].vkfmts;
247  return NULL;
248 }
249 
250 static const void *vk_find_struct(const void *chain, VkStructureType stype)
251 {
252  const VkBaseInStructure *in = chain;
253  while (in) {
254  if (in->sType == stype)
255  return in;
256 
257  in = in->pNext;
258  }
259 
260  return NULL;
261 }
262 
263 static void vk_link_struct(void *chain, void *in)
264 {
265  VkBaseOutStructure *out = chain;
266  if (!in)
267  return;
268 
269  while (out->pNext)
270  out = out->pNext;
271 
272  out->pNext = in;
273 }
274 
276  int linear)
277 {
278  AVVulkanDeviceContext *hwctx = dev_ctx->hwctx;
279  VulkanDevicePriv *priv = dev_ctx->internal->priv;
280  FFVulkanFunctions *vk = &priv->vkfn;
281  const VkFormat *fmt = av_vkfmt_from_pixfmt(p);
283 
284  if (!fmt)
285  return 0;
286 
287  for (int i = 0; i < planes; i++) {
288  VkFormatFeatureFlags flags;
289  VkFormatProperties2 prop = {
290  .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
291  };
292  vk->GetPhysicalDeviceFormatProperties2(hwctx->phys_dev, fmt[i], &prop);
293  flags = linear ? prop.formatProperties.linearTilingFeatures :
294  prop.formatProperties.optimalTilingFeatures;
296  return 0;
297  }
298 
299  return 1;
300 }
301 
303 {
304  AVVulkanDeviceContext *hwctx = ctx->hwctx;
305  VulkanDevicePriv *p = ctx->internal->priv;
306 
307  static const char *lib_names[] = {
308 #if defined(_WIN32)
309  "vulkan-1.dll",
310 #elif defined(__APPLE__)
311  "libvulkan.dylib",
312  "libvulkan.1.dylib",
313  "libMoltenVK.dylib",
314 #else
315  "libvulkan.so.1",
316  "libvulkan.so",
317 #endif
318  };
319 
320  for (int i = 0; i < FF_ARRAY_ELEMS(lib_names); i++) {
321  p->libvulkan = dlopen(lib_names[i], RTLD_NOW | RTLD_LOCAL);
322  if (p->libvulkan)
323  break;
324  }
325 
326  if (!p->libvulkan) {
327  av_log(ctx, AV_LOG_ERROR, "Unable to open the libvulkan library!\n");
328  return AVERROR_UNKNOWN;
329  }
330 
331  hwctx->get_proc_addr = (PFN_vkGetInstanceProcAddr)dlsym(p->libvulkan, "vkGetInstanceProcAddr");
332 
333  return 0;
334 }
335 
336 typedef struct VulkanOptExtension {
337  const char *name;
340 
342  /* For future use */
343 };
344 
346  /* Misc or required by other extensions */
347  { VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME, FF_VK_EXT_NO_FLAG },
348  { VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, FF_VK_EXT_NO_FLAG },
349  { VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME, FF_VK_EXT_NO_FLAG },
350 
351  /* Imports/exports */
352  { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, FF_VK_EXT_EXTERNAL_FD_MEMORY },
353  { VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME, FF_VK_EXT_EXTERNAL_DMABUF_MEMORY },
354  { VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, FF_VK_EXT_DRM_MODIFIER_FLAGS },
355  { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, FF_VK_EXT_EXTERNAL_FD_SEM },
356  { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, FF_VK_EXT_EXTERNAL_HOST_MEMORY },
357 #ifdef _WIN32
358  { VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME, FF_VK_EXT_EXTERNAL_WIN32_MEMORY },
359  { VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME, FF_VK_EXT_EXTERNAL_WIN32_SEM },
360 #endif
361 };
362 
363 /* Converts return values to strings */
364 static const char *vk_ret2str(VkResult res)
365 {
366 #define CASE(VAL) case VAL: return #VAL
367  switch (res) {
368  CASE(VK_SUCCESS);
369  CASE(VK_NOT_READY);
370  CASE(VK_TIMEOUT);
371  CASE(VK_EVENT_SET);
372  CASE(VK_EVENT_RESET);
373  CASE(VK_INCOMPLETE);
374  CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
375  CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
376  CASE(VK_ERROR_INITIALIZATION_FAILED);
377  CASE(VK_ERROR_DEVICE_LOST);
378  CASE(VK_ERROR_MEMORY_MAP_FAILED);
379  CASE(VK_ERROR_LAYER_NOT_PRESENT);
380  CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
381  CASE(VK_ERROR_FEATURE_NOT_PRESENT);
382  CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
383  CASE(VK_ERROR_TOO_MANY_OBJECTS);
384  CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
385  CASE(VK_ERROR_FRAGMENTED_POOL);
386  CASE(VK_ERROR_SURFACE_LOST_KHR);
387  CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
388  CASE(VK_SUBOPTIMAL_KHR);
389  CASE(VK_ERROR_OUT_OF_DATE_KHR);
390  CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
391  CASE(VK_ERROR_VALIDATION_FAILED_EXT);
392  CASE(VK_ERROR_INVALID_SHADER_NV);
393  CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
394  CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
395  CASE(VK_ERROR_NOT_PERMITTED_EXT);
396  CASE(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
397  CASE(VK_ERROR_INVALID_DEVICE_ADDRESS_EXT);
398  CASE(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT);
399  default: return "Unknown error";
400  }
401 #undef CASE
402 }
403 
404 static VkBool32 vk_dbg_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
405  VkDebugUtilsMessageTypeFlagsEXT messageType,
406  const VkDebugUtilsMessengerCallbackDataEXT *data,
407  void *priv)
408 {
409  int l;
410  AVHWDeviceContext *ctx = priv;
411 
412  switch (severity) {
413  case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: l = AV_LOG_VERBOSE; break;
414  case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT: l = AV_LOG_INFO; break;
415  case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: l = AV_LOG_WARNING; break;
416  case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT: l = AV_LOG_ERROR; break;
417  default: l = AV_LOG_DEBUG; break;
418  }
419 
420  av_log(ctx, l, "%s\n", data->pMessage);
421  for (int i = 0; i < data->cmdBufLabelCount; i++)
422  av_log(ctx, l, "\t%i: %s\n", i, data->pCmdBufLabels[i].pLabelName);
423 
424  return 0;
425 }
426 
428  const char * const **dst, uint32_t *num, int debug)
429 {
430  const char *tstr;
431  const char **extension_names = NULL;
432  VulkanDevicePriv *p = ctx->internal->priv;
433  FFVulkanFunctions *vk = &p->vkfn;
434  AVVulkanDeviceContext *hwctx = ctx->hwctx;
435  int err = 0, found, extensions_found = 0;
436 
437  const char *mod;
438  int optional_exts_num;
439  uint32_t sup_ext_count;
440  char *user_exts_str = NULL;
441  AVDictionaryEntry *user_exts;
442  VkExtensionProperties *sup_ext;
443  const VulkanOptExtension *optional_exts;
444 
445  if (!dev) {
446  mod = "instance";
447  optional_exts = optional_instance_exts;
448  optional_exts_num = FF_ARRAY_ELEMS(optional_instance_exts);
449  user_exts = av_dict_get(opts, "instance_extensions", NULL, 0);
450  if (user_exts) {
451  user_exts_str = av_strdup(user_exts->value);
452  if (!user_exts_str) {
453  err = AVERROR(ENOMEM);
454  goto fail;
455  }
456  }
457  vk->EnumerateInstanceExtensionProperties(NULL, &sup_ext_count, NULL);
458  sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
459  if (!sup_ext)
460  return AVERROR(ENOMEM);
461  vk->EnumerateInstanceExtensionProperties(NULL, &sup_ext_count, sup_ext);
462  } else {
463  mod = "device";
464  optional_exts = optional_device_exts;
465  optional_exts_num = FF_ARRAY_ELEMS(optional_device_exts);
466  user_exts = av_dict_get(opts, "device_extensions", NULL, 0);
467  if (user_exts) {
468  user_exts_str = av_strdup(user_exts->value);
469  if (!user_exts_str) {
470  err = AVERROR(ENOMEM);
471  goto fail;
472  }
473  }
474  vk->EnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
475  &sup_ext_count, NULL);
476  sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
477  if (!sup_ext)
478  return AVERROR(ENOMEM);
479  vk->EnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
480  &sup_ext_count, sup_ext);
481  }
482 
483  for (int i = 0; i < optional_exts_num; i++) {
484  tstr = optional_exts[i].name;
485  found = 0;
486  for (int j = 0; j < sup_ext_count; j++) {
487  if (!strcmp(tstr, sup_ext[j].extensionName)) {
488  found = 1;
489  break;
490  }
491  }
492  if (!found)
493  continue;
494 
495  av_log(ctx, AV_LOG_VERBOSE, "Using %s extension %s\n", mod, tstr);
496  p->extensions |= optional_exts[i].flag;
497  ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
498  }
499 
500  if (debug && !dev) {
501  tstr = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
502  found = 0;
503  for (int j = 0; j < sup_ext_count; j++) {
504  if (!strcmp(tstr, sup_ext[j].extensionName)) {
505  found = 1;
506  break;
507  }
508  }
509  if (found) {
510  av_log(ctx, AV_LOG_VERBOSE, "Using %s extension %s\n", mod, tstr);
511  ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
513  } else {
514  av_log(ctx, AV_LOG_ERROR, "Debug extension \"%s\" not found!\n",
515  tstr);
516  err = AVERROR(EINVAL);
517  goto fail;
518  }
519  }
520 
521  if (user_exts_str) {
522  char *save, *token = av_strtok(user_exts_str, "+", &save);
523  while (token) {
524  found = 0;
525  for (int j = 0; j < sup_ext_count; j++) {
526  if (!strcmp(token, sup_ext[j].extensionName)) {
527  found = 1;
528  break;
529  }
530  }
531  if (found) {
532  av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, token);
533  ADD_VAL_TO_LIST(extension_names, extensions_found, token);
534  } else {
535  av_log(ctx, AV_LOG_WARNING, "%s extension \"%s\" not found, excluding.\n",
536  mod, token);
537  }
538  token = av_strtok(NULL, "+", &save);
539  }
540  }
541 
542  *dst = extension_names;
543  *num = extensions_found;
544 
545  av_free(user_exts_str);
546  av_free(sup_ext);
547  return 0;
548 
549 fail:
550  RELEASE_PROPS(extension_names, extensions_found);
551  av_free(user_exts_str);
552  av_free(sup_ext);
553  return err;
554 }
555 
557  const char * const **dst, uint32_t *num,
558  int *debug_mode)
559 {
560  static const char default_layer[] = { "VK_LAYER_KHRONOS_validation" };
561 
562  int found = 0, err = 0;
563  VulkanDevicePriv *priv = ctx->internal->priv;
564  FFVulkanFunctions *vk = &priv->vkfn;
565 
566  uint32_t sup_layer_count;
567  VkLayerProperties *sup_layers;
568 
569  AVDictionaryEntry *user_layers;
570  char *user_layers_str = NULL;
571  char *save, *token;
572 
573  const char **enabled_layers = NULL;
574  uint32_t enabled_layers_count = 0;
575 
576  AVDictionaryEntry *debug_opt = av_dict_get(opts, "debug", NULL, 0);
577  int debug = debug_opt && strtol(debug_opt->value, NULL, 10);
578 
579  /* If `debug=0`, enable no layers at all. */
580  if (debug_opt && !debug)
581  return 0;
582 
583  vk->EnumerateInstanceLayerProperties(&sup_layer_count, NULL);
584  sup_layers = av_malloc_array(sup_layer_count, sizeof(VkLayerProperties));
585  if (!sup_layers)
586  return AVERROR(ENOMEM);
587  vk->EnumerateInstanceLayerProperties(&sup_layer_count, sup_layers);
588 
589  av_log(ctx, AV_LOG_VERBOSE, "Supported validation layers:\n");
590  for (int i = 0; i < sup_layer_count; i++)
591  av_log(ctx, AV_LOG_VERBOSE, "\t%s\n", sup_layers[i].layerName);
592 
593  /* If `debug=1` is specified, enable the standard validation layer extension */
594  if (debug) {
595  *debug_mode = debug;
596  for (int i = 0; i < sup_layer_count; i++) {
597  if (!strcmp(default_layer, sup_layers[i].layerName)) {
598  found = 1;
599  av_log(ctx, AV_LOG_VERBOSE, "Default validation layer %s is enabled\n",
600  default_layer);
601  ADD_VAL_TO_LIST(enabled_layers, enabled_layers_count, default_layer);
602  break;
603  }
604  }
605  }
606 
607  user_layers = av_dict_get(opts, "validation_layers", NULL, 0);
608  if (!user_layers)
609  goto end;
610 
611  user_layers_str = av_strdup(user_layers->value);
612  if (!user_layers_str) {
613  err = AVERROR(ENOMEM);
614  goto fail;
615  }
616 
617  token = av_strtok(user_layers_str, "+", &save);
618  while (token) {
619  found = 0;
620  if (!strcmp(default_layer, token)) {
621  if (debug) {
622  /* if the `debug=1`, default_layer is enabled, skip here */
623  token = av_strtok(NULL, "+", &save);
624  continue;
625  } else {
626  /* if the `debug=0`, enable debug mode to load its callback properly */
627  *debug_mode = debug;
628  }
629  }
630  for (int j = 0; j < sup_layer_count; j++) {
631  if (!strcmp(token, sup_layers[j].layerName)) {
632  found = 1;
633  break;
634  }
635  }
636  if (found) {
637  av_log(ctx, AV_LOG_VERBOSE, "Requested Validation Layer: %s\n", token);
638  ADD_VAL_TO_LIST(enabled_layers, enabled_layers_count, token);
639  } else {
641  "Validation Layer \"%s\" not support.\n", token);
642  err = AVERROR(EINVAL);
643  goto fail;
644  }
645  token = av_strtok(NULL, "+", &save);
646  }
647 
648  av_free(user_layers_str);
649 
650 end:
651  av_free(sup_layers);
652 
653  *dst = enabled_layers;
654  *num = enabled_layers_count;
655 
656  return 0;
657 
658 fail:
659  RELEASE_PROPS(enabled_layers, enabled_layers_count);
660  av_free(sup_layers);
661  av_free(user_layers_str);
662  return err;
663 }
664 
665 /* Creates a VkInstance */
667 {
668  int err = 0, debug_mode = 0;
669  VkResult ret;
670  VulkanDevicePriv *p = ctx->internal->priv;
671  FFVulkanFunctions *vk = &p->vkfn;
672  AVVulkanDeviceContext *hwctx = ctx->hwctx;
673  VkApplicationInfo application_info = {
674  .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
675  .pEngineName = "libavutil",
676  .apiVersion = VK_API_VERSION_1_2,
677  .engineVersion = VK_MAKE_VERSION(LIBAVUTIL_VERSION_MAJOR,
680  };
681  VkInstanceCreateInfo inst_props = {
682  .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
683  .pApplicationInfo = &application_info,
684  };
685 
686  if (!hwctx->get_proc_addr) {
687  err = load_libvulkan(ctx);
688  if (err < 0)
689  return err;
690  }
691 
692  err = ff_vk_load_functions(ctx, vk, p->extensions, 0, 0);
693  if (err < 0) {
694  av_log(ctx, AV_LOG_ERROR, "Unable to load instance enumeration functions!\n");
695  return err;
696  }
697 
698  err = check_validation_layers(ctx, opts, &inst_props.ppEnabledLayerNames,
699  &inst_props.enabledLayerCount, &debug_mode);
700  if (err)
701  goto fail;
702 
703  /* Check for present/missing extensions */
704  err = check_extensions(ctx, 0, opts, &inst_props.ppEnabledExtensionNames,
705  &inst_props.enabledExtensionCount, debug_mode);
706  hwctx->enabled_inst_extensions = inst_props.ppEnabledExtensionNames;
707  hwctx->nb_enabled_inst_extensions = inst_props.enabledExtensionCount;
708  if (err < 0)
709  goto fail;
710 
711  /* Try to create the instance */
712  ret = vk->CreateInstance(&inst_props, hwctx->alloc, &hwctx->inst);
713 
714  /* Check for errors */
715  if (ret != VK_SUCCESS) {
716  av_log(ctx, AV_LOG_ERROR, "Instance creation failure: %s\n",
717  vk_ret2str(ret));
718  err = AVERROR_EXTERNAL;
719  goto fail;
720  }
721 
722  err = ff_vk_load_functions(ctx, vk, p->extensions, 1, 0);
723  if (err < 0) {
724  av_log(ctx, AV_LOG_ERROR, "Unable to load instance functions!\n");
725  goto fail;
726  }
727 
728  if (debug_mode) {
729  VkDebugUtilsMessengerCreateInfoEXT dbg = {
730  .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
731  .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
732  VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT |
733  VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
734  VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
735  .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
736  VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
737  VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
738  .pfnUserCallback = vk_dbg_callback,
739  .pUserData = ctx,
740  };
741 
742  vk->CreateDebugUtilsMessengerEXT(hwctx->inst, &dbg,
743  hwctx->alloc, &p->debug_ctx);
744  }
745 
746  err = 0;
747 
748 fail:
749  RELEASE_PROPS(inst_props.ppEnabledLayerNames, inst_props.enabledLayerCount);
750  return err;
751 }
752 
753 typedef struct VulkanDeviceSelection {
754  uint8_t uuid[VK_UUID_SIZE]; /* Will use this first unless !has_uuid */
755  int has_uuid;
756  const char *name; /* Will use this second unless NULL */
757  uint32_t pci_device; /* Will use this third unless 0x0 */
758  uint32_t vendor_id; /* Last resort to find something deterministic */
759  int index; /* Finally fall back to index */
761 
762 static const char *vk_dev_type(enum VkPhysicalDeviceType type)
763 {
764  switch (type) {
765  case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: return "integrated";
766  case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU: return "discrete";
767  case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU: return "virtual";
768  case VK_PHYSICAL_DEVICE_TYPE_CPU: return "software";
769  default: return "unknown";
770  }
771 }
772 
773 /* Finds a device */
775 {
776  int err = 0, choice = -1;
777  uint32_t num;
778  VkResult ret;
779  VulkanDevicePriv *p = ctx->internal->priv;
780  FFVulkanFunctions *vk = &p->vkfn;
781  VkPhysicalDevice *devices = NULL;
782  VkPhysicalDeviceIDProperties *idp = NULL;
783  VkPhysicalDeviceProperties2 *prop = NULL;
784  AVVulkanDeviceContext *hwctx = ctx->hwctx;
785 
786  ret = vk->EnumeratePhysicalDevices(hwctx->inst, &num, NULL);
787  if (ret != VK_SUCCESS || !num) {
788  av_log(ctx, AV_LOG_ERROR, "No devices found: %s!\n", vk_ret2str(ret));
789  return AVERROR(ENODEV);
790  }
791 
792  devices = av_malloc_array(num, sizeof(VkPhysicalDevice));
793  if (!devices)
794  return AVERROR(ENOMEM);
795 
796  ret = vk->EnumeratePhysicalDevices(hwctx->inst, &num, devices);
797  if (ret != VK_SUCCESS) {
798  av_log(ctx, AV_LOG_ERROR, "Failed enumerating devices: %s\n",
799  vk_ret2str(ret));
800  err = AVERROR(ENODEV);
801  goto end;
802  }
803 
804  prop = av_calloc(num, sizeof(*prop));
805  if (!prop) {
806  err = AVERROR(ENOMEM);
807  goto end;
808  }
809 
810  idp = av_calloc(num, sizeof(*idp));
811  if (!idp) {
812  err = AVERROR(ENOMEM);
813  goto end;
814  }
815 
816  av_log(ctx, AV_LOG_VERBOSE, "GPU listing:\n");
817  for (int i = 0; i < num; i++) {
818  idp[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
819  prop[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
820  prop[i].pNext = &idp[i];
821 
822  vk->GetPhysicalDeviceProperties2(devices[i], &prop[i]);
823  av_log(ctx, AV_LOG_VERBOSE, " %d: %s (%s) (0x%x)\n", i,
824  prop[i].properties.deviceName,
825  vk_dev_type(prop[i].properties.deviceType),
826  prop[i].properties.deviceID);
827  }
828 
829  if (select->has_uuid) {
830  for (int i = 0; i < num; i++) {
831  if (!strncmp(idp[i].deviceUUID, select->uuid, VK_UUID_SIZE)) {
832  choice = i;
833  goto end;
834  }
835  }
836  av_log(ctx, AV_LOG_ERROR, "Unable to find device by given UUID!\n");
837  err = AVERROR(ENODEV);
838  goto end;
839  } else if (select->name) {
840  av_log(ctx, AV_LOG_VERBOSE, "Requested device: %s\n", select->name);
841  for (int i = 0; i < num; i++) {
842  if (strstr(prop[i].properties.deviceName, select->name)) {
843  choice = i;
844  goto end;
845  }
846  }
847  av_log(ctx, AV_LOG_ERROR, "Unable to find device \"%s\"!\n",
848  select->name);
849  err = AVERROR(ENODEV);
850  goto end;
851  } else if (select->pci_device) {
852  av_log(ctx, AV_LOG_VERBOSE, "Requested device: 0x%x\n", select->pci_device);
853  for (int i = 0; i < num; i++) {
854  if (select->pci_device == prop[i].properties.deviceID) {
855  choice = i;
856  goto end;
857  }
858  }
859  av_log(ctx, AV_LOG_ERROR, "Unable to find device with PCI ID 0x%x!\n",
860  select->pci_device);
861  err = AVERROR(EINVAL);
862  goto end;
863  } else if (select->vendor_id) {
864  av_log(ctx, AV_LOG_VERBOSE, "Requested vendor: 0x%x\n", select->vendor_id);
865  for (int i = 0; i < num; i++) {
866  if (select->vendor_id == prop[i].properties.vendorID) {
867  choice = i;
868  goto end;
869  }
870  }
871  av_log(ctx, AV_LOG_ERROR, "Unable to find device with Vendor ID 0x%x!\n",
872  select->vendor_id);
873  err = AVERROR(ENODEV);
874  goto end;
875  } else {
876  if (select->index < num) {
877  choice = select->index;
878  goto end;
879  }
880  av_log(ctx, AV_LOG_ERROR, "Unable to find device with index %i!\n",
881  select->index);
882  err = AVERROR(ENODEV);
883  goto end;
884  }
885 
886 end:
887  if (choice > -1) {
888  av_log(ctx, AV_LOG_VERBOSE, "Device %d selected: %s (%s) (0x%x)\n",
889  choice, prop[choice].properties.deviceName,
890  vk_dev_type(prop[choice].properties.deviceType),
891  prop[choice].properties.deviceID);
892  hwctx->phys_dev = devices[choice];
893  }
894 
895  av_free(devices);
896  av_free(prop);
897  av_free(idp);
898 
899  return err;
900 }
901 
902 /* Picks the least used qf with the fewest unneeded flags, or -1 if none found */
903 static inline int pick_queue_family(VkQueueFamilyProperties *qf, uint32_t num_qf,
904  VkQueueFlagBits flags)
905 {
906  int index = -1;
907  uint32_t min_score = UINT32_MAX;
908 
909  for (int i = 0; i < num_qf; i++) {
910  const VkQueueFlagBits qflags = qf[i].queueFlags;
911  if (qflags & flags) {
912  uint32_t score = av_popcount(qflags) + qf[i].timestampValidBits;
913  if (score < min_score) {
914  index = i;
915  min_score = score;
916  }
917  }
918  }
919 
920  if (index > -1)
921  qf[index].timestampValidBits++;
922 
923  return index;
924 }
925 
926 static int setup_queue_families(AVHWDeviceContext *ctx, VkDeviceCreateInfo *cd)
927 {
928  uint32_t num;
929  float *weights;
930  VkQueueFamilyProperties *qf = NULL;
931  VulkanDevicePriv *p = ctx->internal->priv;
932  FFVulkanFunctions *vk = &p->vkfn;
933  AVVulkanDeviceContext *hwctx = ctx->hwctx;
934  int graph_index, comp_index, tx_index, enc_index, dec_index;
935 
936  /* First get the number of queue families */
937  vk->GetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, NULL);
938  if (!num) {
939  av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
940  return AVERROR_EXTERNAL;
941  }
942 
943  /* Then allocate memory */
944  qf = av_malloc_array(num, sizeof(VkQueueFamilyProperties));
945  if (!qf)
946  return AVERROR(ENOMEM);
947 
948  /* Finally retrieve the queue families */
949  vk->GetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, qf);
950 
951  av_log(ctx, AV_LOG_VERBOSE, "Queue families:\n");
952  for (int i = 0; i < num; i++) {
953  av_log(ctx, AV_LOG_VERBOSE, " %i:%s%s%s%s%s%s%s (queues: %i)\n", i,
954  ((qf[i].queueFlags) & VK_QUEUE_GRAPHICS_BIT) ? " graphics" : "",
955  ((qf[i].queueFlags) & VK_QUEUE_COMPUTE_BIT) ? " compute" : "",
956  ((qf[i].queueFlags) & VK_QUEUE_TRANSFER_BIT) ? " transfer" : "",
957  ((qf[i].queueFlags) & VK_QUEUE_VIDEO_ENCODE_BIT_KHR) ? " encode" : "",
958  ((qf[i].queueFlags) & VK_QUEUE_VIDEO_DECODE_BIT_KHR) ? " decode" : "",
959  ((qf[i].queueFlags) & VK_QUEUE_SPARSE_BINDING_BIT) ? " sparse" : "",
960  ((qf[i].queueFlags) & VK_QUEUE_PROTECTED_BIT) ? " protected" : "",
961  qf[i].queueCount);
962 
963  /* We use this field to keep a score of how many times we've used that
964  * queue family in order to make better choices. */
965  qf[i].timestampValidBits = 0;
966  }
967 
968  /* Pick each queue family to use */
969  graph_index = pick_queue_family(qf, num, VK_QUEUE_GRAPHICS_BIT);
970  comp_index = pick_queue_family(qf, num, VK_QUEUE_COMPUTE_BIT);
971  tx_index = pick_queue_family(qf, num, VK_QUEUE_TRANSFER_BIT);
972  enc_index = pick_queue_family(qf, num, VK_QUEUE_VIDEO_ENCODE_BIT_KHR);
973  dec_index = pick_queue_family(qf, num, VK_QUEUE_VIDEO_DECODE_BIT_KHR);
974 
975  /* Signalling the transfer capabilities on a queue family is optional */
976  if (tx_index < 0) {
977  tx_index = pick_queue_family(qf, num, VK_QUEUE_COMPUTE_BIT);
978  if (tx_index < 0)
979  tx_index = pick_queue_family(qf, num, VK_QUEUE_GRAPHICS_BIT);
980  }
981 
982  hwctx->queue_family_index = -1;
983  hwctx->queue_family_comp_index = -1;
984  hwctx->queue_family_tx_index = -1;
985  hwctx->queue_family_encode_index = -1;
986  hwctx->queue_family_decode_index = -1;
987 
988 #define SETUP_QUEUE(qf_idx) \
989  if (qf_idx > -1) { \
990  int fidx = qf_idx; \
991  int qc = qf[fidx].queueCount; \
992  VkDeviceQueueCreateInfo *pc; \
993  \
994  if (fidx == graph_index) { \
995  hwctx->queue_family_index = fidx; \
996  hwctx->nb_graphics_queues = qc; \
997  graph_index = -1; \
998  } \
999  if (fidx == comp_index) { \
1000  hwctx->queue_family_comp_index = fidx; \
1001  hwctx->nb_comp_queues = qc; \
1002  comp_index = -1; \
1003  } \
1004  if (fidx == tx_index) { \
1005  hwctx->queue_family_tx_index = fidx; \
1006  hwctx->nb_tx_queues = qc; \
1007  tx_index = -1; \
1008  } \
1009  if (fidx == enc_index) { \
1010  hwctx->queue_family_encode_index = fidx; \
1011  hwctx->nb_encode_queues = qc; \
1012  enc_index = -1; \
1013  } \
1014  if (fidx == dec_index) { \
1015  hwctx->queue_family_decode_index = fidx; \
1016  hwctx->nb_decode_queues = qc; \
1017  dec_index = -1; \
1018  } \
1019  \
1020  pc = av_realloc((void *)cd->pQueueCreateInfos, \
1021  sizeof(*pc) * (cd->queueCreateInfoCount + 1)); \
1022  if (!pc) { \
1023  av_free(qf); \
1024  return AVERROR(ENOMEM); \
1025  } \
1026  cd->pQueueCreateInfos = pc; \
1027  pc = &pc[cd->queueCreateInfoCount]; \
1028  \
1029  weights = av_malloc(qc * sizeof(float)); \
1030  if (!weights) { \
1031  av_free(qf); \
1032  return AVERROR(ENOMEM); \
1033  } \
1034  \
1035  memset(pc, 0, sizeof(*pc)); \
1036  pc->sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; \
1037  pc->queueFamilyIndex = fidx; \
1038  pc->queueCount = qc; \
1039  pc->pQueuePriorities = weights; \
1040  \
1041  for (int i = 0; i < qc; i++) \
1042  weights[i] = 1.0f / qc; \
1043  \
1044  cd->queueCreateInfoCount++; \
1045  }
1046 
1047  SETUP_QUEUE(graph_index)
1048  SETUP_QUEUE(comp_index)
1049  SETUP_QUEUE(tx_index)
1050  SETUP_QUEUE(enc_index)
1051  SETUP_QUEUE(dec_index)
1052 
1053 #undef SETUP_QUEUE
1054 
1055  av_free(qf);
1056 
1057  return 0;
1058 }
1059 
1061  int queue_family_index, int num_queues)
1062 {
1063  VkResult ret;
1064  AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1066  FFVulkanFunctions *vk = &p->vkfn;
1067 
1068  VkCommandPoolCreateInfo cqueue_create = {
1069  .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
1070  .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
1071  .queueFamilyIndex = queue_family_index,
1072  };
1073  VkCommandBufferAllocateInfo cbuf_create = {
1074  .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
1075  .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
1076  .commandBufferCount = num_queues,
1077  };
1078 
1079  cmd->nb_queues = num_queues;
1080 
1081  /* Create command pool */
1082  ret = vk->CreateCommandPool(hwctx->act_dev, &cqueue_create,
1083  hwctx->alloc, &cmd->pool);
1084  if (ret != VK_SUCCESS) {
1085  av_log(hwfc, AV_LOG_ERROR, "Command pool creation failure: %s\n",
1086  vk_ret2str(ret));
1087  return AVERROR_EXTERNAL;
1088  }
1089 
1090  cmd->bufs = av_mallocz(num_queues * sizeof(*cmd->bufs));
1091  if (!cmd->bufs)
1092  return AVERROR(ENOMEM);
1093 
1094  cbuf_create.commandPool = cmd->pool;
1095 
1096  /* Allocate command buffer */
1097  ret = vk->AllocateCommandBuffers(hwctx->act_dev, &cbuf_create, cmd->bufs);
1098  if (ret != VK_SUCCESS) {
1099  av_log(hwfc, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
1100  vk_ret2str(ret));
1101  av_freep(&cmd->bufs);
1102  return AVERROR_EXTERNAL;
1103  }
1104 
1105  cmd->queues = av_mallocz(num_queues * sizeof(*cmd->queues));
1106  if (!cmd->queues)
1107  return AVERROR(ENOMEM);
1108 
1109  for (int i = 0; i < num_queues; i++) {
1110  VulkanQueueCtx *q = &cmd->queues[i];
1111  vk->GetDeviceQueue(hwctx->act_dev, queue_family_index, i, &q->queue);
1112  q->was_synchronous = 1;
1113  }
1114 
1115  return 0;
1116 }
1117 
1119 {
1120  AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1122  FFVulkanFunctions *vk = &p->vkfn;
1123 
1124  if (cmd->queues) {
1125  for (int i = 0; i < cmd->nb_queues; i++) {
1126  VulkanQueueCtx *q = &cmd->queues[i];
1127 
1128  /* Make sure all queues have finished executing */
1129  if (q->fence && !q->was_synchronous) {
1130  vk->WaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
1131  vk->ResetFences(hwctx->act_dev, 1, &q->fence);
1132  }
1133 
1134  /* Free the fence */
1135  if (q->fence)
1136  vk->DestroyFence(hwctx->act_dev, q->fence, hwctx->alloc);
1137 
1138  /* Free buffer dependencies */
1139  for (int j = 0; j < q->nb_buf_deps; j++)
1140  av_buffer_unref(&q->buf_deps[j]);
1141  av_free(q->buf_deps);
1142  }
1143  }
1144 
1145  if (cmd->bufs)
1146  vk->FreeCommandBuffers(hwctx->act_dev, cmd->pool, cmd->nb_queues, cmd->bufs);
1147  if (cmd->pool)
1148  vk->DestroyCommandPool(hwctx->act_dev, cmd->pool, hwctx->alloc);
1149 
1150  av_freep(&cmd->queues);
1151  av_freep(&cmd->bufs);
1152  cmd->pool = NULL;
1153 }
1154 
1155 static VkCommandBuffer get_buf_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
1156 {
1157  return cmd->bufs[cmd->cur_queue_idx];
1158 }
1159 
1161 {
1162  VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];
1163 
1164  for (int j = 0; j < q->nb_buf_deps; j++)
1165  av_buffer_unref(&q->buf_deps[j]);
1166  q->nb_buf_deps = 0;
1167 }
1168 
1170 {
1171  VkResult ret;
1172  AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1173  VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];
1175  FFVulkanFunctions *vk = &p->vkfn;
1176 
1177  VkCommandBufferBeginInfo cmd_start = {
1178  .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
1179  .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
1180  };
1181 
1182  /* Create the fence and don't wait for it initially */
1183  if (!q->fence) {
1184  VkFenceCreateInfo fence_spawn = {
1185  .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
1186  };
1187  ret = vk->CreateFence(hwctx->act_dev, &fence_spawn, hwctx->alloc,
1188  &q->fence);
1189  if (ret != VK_SUCCESS) {
1190  av_log(hwfc, AV_LOG_ERROR, "Failed to queue frame fence: %s\n",
1191  vk_ret2str(ret));
1192  return AVERROR_EXTERNAL;
1193  }
1194  } else if (!q->was_synchronous) {
1195  vk->WaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
1196  vk->ResetFences(hwctx->act_dev, 1, &q->fence);
1197  }
1198 
1199  /* Discard queue dependencies */
1200  unref_exec_ctx_deps(hwfc, cmd);
1201 
1202  ret = vk->BeginCommandBuffer(cmd->bufs[cmd->cur_queue_idx], &cmd_start);
1203  if (ret != VK_SUCCESS) {
1204  av_log(hwfc, AV_LOG_ERROR, "Unable to init command buffer: %s\n",
1205  vk_ret2str(ret));
1206  return AVERROR_EXTERNAL;
1207  }
1208 
1209  return 0;
1210 }
1211 
1213  AVBufferRef * const *deps, int nb_deps)
1214 {
1215  AVBufferRef **dst;
1216  VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];
1217 
1218  if (!deps || !nb_deps)
1219  return 0;
1220 
1222  (q->nb_buf_deps + nb_deps) * sizeof(*dst));
1223  if (!dst)
1224  goto err;
1225 
1226  q->buf_deps = dst;
1227 
1228  for (int i = 0; i < nb_deps; i++) {
1229  q->buf_deps[q->nb_buf_deps] = av_buffer_ref(deps[i]);
1230  if (!q->buf_deps[q->nb_buf_deps])
1231  goto err;
1232  q->nb_buf_deps++;
1233  }
1234 
1235  return 0;
1236 
1237 err:
1238  unref_exec_ctx_deps(hwfc, cmd);
1239  return AVERROR(ENOMEM);
1240 }
1241 
1243  VkSubmitInfo *s_info, AVVkFrame *f, int synchronous)
1244 {
1245  VkResult ret;
1246  VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];
1248  FFVulkanFunctions *vk = &p->vkfn;
1249 
1250  ret = vk->EndCommandBuffer(cmd->bufs[cmd->cur_queue_idx]);
1251  if (ret != VK_SUCCESS) {
1252  av_log(hwfc, AV_LOG_ERROR, "Unable to finish command buffer: %s\n",
1253  vk_ret2str(ret));
1254  unref_exec_ctx_deps(hwfc, cmd);
1255  return AVERROR_EXTERNAL;
1256  }
1257 
1258  s_info->pCommandBuffers = &cmd->bufs[cmd->cur_queue_idx];
1259  s_info->commandBufferCount = 1;
1260 
1261  ret = vk->QueueSubmit(q->queue, 1, s_info, q->fence);
1262  if (ret != VK_SUCCESS) {
1263  av_log(hwfc, AV_LOG_ERROR, "Queue submission failure: %s\n",
1264  vk_ret2str(ret));
1265  unref_exec_ctx_deps(hwfc, cmd);
1266  return AVERROR_EXTERNAL;
1267  }
1268 
1269  if (f)
1270  for (int i = 0; i < s_info->signalSemaphoreCount; i++)
1271  f->sem_value[i]++;
1272 
1273  q->was_synchronous = synchronous;
1274 
1275  if (synchronous) {
1276  AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1277  vk->WaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
1278  vk->ResetFences(hwctx->act_dev, 1, &q->fence);
1279  unref_exec_ctx_deps(hwfc, cmd);
1280  } else { /* Rotate queues */
1281  cmd->cur_queue_idx = (cmd->cur_queue_idx + 1) % cmd->nb_queues;
1282  }
1283 
1284  return 0;
1285 }
1286 
1288 {
1289  VulkanDevicePriv *p = ctx->internal->priv;
1290  FFVulkanFunctions *vk = &p->vkfn;
1291  AVVulkanDeviceContext *hwctx = ctx->hwctx;
1292 
1293  if (hwctx->act_dev)
1294  vk->DestroyDevice(hwctx->act_dev, hwctx->alloc);
1295 
1296  if (p->debug_ctx)
1297  vk->DestroyDebugUtilsMessengerEXT(hwctx->inst, p->debug_ctx,
1298  hwctx->alloc);
1299 
1300  if (hwctx->inst)
1301  vk->DestroyInstance(hwctx->inst, hwctx->alloc);
1302 
1303  if (p->libvulkan)
1304  dlclose(p->libvulkan);
1305 
1308 }
1309 
1311  VulkanDeviceSelection *dev_select,
1312  AVDictionary *opts, int flags)
1313 {
1314  int err = 0;
1315  VkResult ret;
1316  AVDictionaryEntry *opt_d;
1317  VulkanDevicePriv *p = ctx->internal->priv;
1318  FFVulkanFunctions *vk = &p->vkfn;
1319  AVVulkanDeviceContext *hwctx = ctx->hwctx;
1320 
1321  /*
1322  * VkPhysicalDeviceVulkan12Features has a timelineSemaphore field, but
1323  * MoltenVK doesn't implement VkPhysicalDeviceVulkan12Features yet, so we
1324  * use VkPhysicalDeviceTimelineSemaphoreFeatures directly.
1325  */
1326  VkPhysicalDeviceTimelineSemaphoreFeatures timeline_features = {
1327  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
1328  };
1329  VkPhysicalDeviceVulkan12Features dev_features_1_2 = {
1330  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
1331  .pNext = &timeline_features,
1332  };
1333  VkPhysicalDeviceVulkan11Features dev_features_1_1 = {
1334  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
1335  .pNext = &dev_features_1_2,
1336  };
1337  VkPhysicalDeviceFeatures2 dev_features = {
1338  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
1339  .pNext = &dev_features_1_1,
1340  };
1341 
1342  VkDeviceCreateInfo dev_info = {
1343  .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
1344  .pNext = &hwctx->device_features,
1345  };
1346 
1347  hwctx->device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
1348  hwctx->device_features.pNext = &p->device_features_1_1;
1349  p->device_features_1_1.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
1351  p->device_features_1_2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
1352  ctx->free = vulkan_device_free;
1353 
1354  /* Create an instance if not given one */
1355  if ((err = create_instance(ctx, opts)))
1356  goto end;
1357 
1358  /* Find a device (if not given one) */
1359  if ((err = find_device(ctx, dev_select)))
1360  goto end;
1361 
1362  vk->GetPhysicalDeviceFeatures2(hwctx->phys_dev, &dev_features);
1363 
1364  /* Try to keep in sync with libplacebo */
1365 #define COPY_FEATURE(DST, NAME) (DST).features.NAME = dev_features.features.NAME;
1366  COPY_FEATURE(hwctx->device_features, shaderImageGatherExtended)
1367  COPY_FEATURE(hwctx->device_features, shaderStorageImageReadWithoutFormat)
1368  COPY_FEATURE(hwctx->device_features, shaderStorageImageWriteWithoutFormat)
1369  COPY_FEATURE(hwctx->device_features, fragmentStoresAndAtomics)
1370  COPY_FEATURE(hwctx->device_features, vertexPipelineStoresAndAtomics)
1371  COPY_FEATURE(hwctx->device_features, shaderInt64)
1372 #undef COPY_FEATURE
1373 
1374  /* We require timeline semaphores */
1375  if (!timeline_features.timelineSemaphore) {
1376  av_log(ctx, AV_LOG_ERROR, "Device does not support timeline semaphores!\n");
1377  err = AVERROR(ENOSYS);
1378  goto end;
1379  }
1380  p->device_features_1_2.timelineSemaphore = 1;
1381 
1382  /* Setup queue family */
1383  if ((err = setup_queue_families(ctx, &dev_info)))
1384  goto end;
1385 
1386  if ((err = check_extensions(ctx, 1, opts, &dev_info.ppEnabledExtensionNames,
1387  &dev_info.enabledExtensionCount, 0))) {
1388  for (int i = 0; i < dev_info.queueCreateInfoCount; i++)
1389  av_free((void *)dev_info.pQueueCreateInfos[i].pQueuePriorities);
1390  av_free((void *)dev_info.pQueueCreateInfos);
1391  goto end;
1392  }
1393 
1394  ret = vk->CreateDevice(hwctx->phys_dev, &dev_info, hwctx->alloc,
1395  &hwctx->act_dev);
1396 
1397  for (int i = 0; i < dev_info.queueCreateInfoCount; i++)
1398  av_free((void *)dev_info.pQueueCreateInfos[i].pQueuePriorities);
1399  av_free((void *)dev_info.pQueueCreateInfos);
1400 
1401  if (ret != VK_SUCCESS) {
1402  av_log(ctx, AV_LOG_ERROR, "Device creation failure: %s\n",
1403  vk_ret2str(ret));
1404  for (int i = 0; i < dev_info.enabledExtensionCount; i++)
1405  av_free((void *)dev_info.ppEnabledExtensionNames[i]);
1406  av_free((void *)dev_info.ppEnabledExtensionNames);
1407  err = AVERROR_EXTERNAL;
1408  goto end;
1409  }
1410 
1411  /* Tiled images setting, use them by default */
1412  opt_d = av_dict_get(opts, "linear_images", NULL, 0);
1413  if (opt_d)
1414  p->use_linear_images = strtol(opt_d->value, NULL, 10);
1415 
1416  opt_d = av_dict_get(opts, "contiguous_planes", NULL, 0);
1417  if (opt_d)
1418  p->contiguous_planes = strtol(opt_d->value, NULL, 10);
1419  else
1420  p->contiguous_planes = -1;
1421 
1422  hwctx->enabled_dev_extensions = dev_info.ppEnabledExtensionNames;
1423  hwctx->nb_enabled_dev_extensions = dev_info.enabledExtensionCount;
1424 
1425 end:
1426  return err;
1427 }
1428 
1430 {
1431  int err;
1432  uint32_t queue_num;
1433  AVVulkanDeviceContext *hwctx = ctx->hwctx;
1434  VulkanDevicePriv *p = ctx->internal->priv;
1435  FFVulkanFunctions *vk = &p->vkfn;
1436  int graph_index, comp_index, tx_index, enc_index, dec_index;
1437 
1438  /* Set device extension flags */
1439  for (int i = 0; i < hwctx->nb_enabled_dev_extensions; i++) {
1440  for (int j = 0; j < FF_ARRAY_ELEMS(optional_device_exts); j++) {
1441  if (!strcmp(hwctx->enabled_dev_extensions[i],
1442  optional_device_exts[j].name)) {
1444  break;
1445  }
1446  }
1447  }
1448 
1449  err = ff_vk_load_functions(ctx, vk, p->extensions, 1, 1);
1450  if (err < 0) {
1451  av_log(ctx, AV_LOG_ERROR, "Unable to load functions!\n");
1452  return err;
1453  }
1454 
1455  p->props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
1456  p->props.pNext = &p->hprops;
1457  p->hprops.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;
1458 
1459  vk->GetPhysicalDeviceProperties2(hwctx->phys_dev, &p->props);
1460  av_log(ctx, AV_LOG_VERBOSE, "Using device: %s\n",
1461  p->props.properties.deviceName);
1462  av_log(ctx, AV_LOG_VERBOSE, "Alignments:\n");
1463  av_log(ctx, AV_LOG_VERBOSE, " optimalBufferCopyRowPitchAlignment: %"PRIu64"\n",
1464  p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
1465  av_log(ctx, AV_LOG_VERBOSE, " minMemoryMapAlignment: %"SIZE_SPECIFIER"\n",
1466  p->props.properties.limits.minMemoryMapAlignment);
1468  av_log(ctx, AV_LOG_VERBOSE, " minImportedHostPointerAlignment: %"PRIu64"\n",
1469  p->hprops.minImportedHostPointerAlignment);
1470 
1471  p->dev_is_nvidia = (p->props.properties.vendorID == 0x10de);
1472  p->dev_is_intel = (p->props.properties.vendorID == 0x8086);
1473 
1474  vk->GetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &queue_num, NULL);
1475  if (!queue_num) {
1476  av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
1477  return AVERROR_EXTERNAL;
1478  }
1479 
1480  graph_index = hwctx->queue_family_index;
1481  comp_index = hwctx->queue_family_comp_index;
1482  tx_index = hwctx->queue_family_tx_index;
1483  enc_index = hwctx->queue_family_encode_index;
1484  dec_index = hwctx->queue_family_decode_index;
1485 
1486 #define CHECK_QUEUE(type, required, fidx, ctx_qf, qc) \
1487  do { \
1488  if (ctx_qf < 0 && required) { \
1489  av_log(ctx, AV_LOG_ERROR, "%s queue family is required, but marked as missing" \
1490  " in the context!\n", type); \
1491  return AVERROR(EINVAL); \
1492  } else if (fidx < 0 || ctx_qf < 0) { \
1493  break; \
1494  } else if (ctx_qf >= queue_num) { \
1495  av_log(ctx, AV_LOG_ERROR, "Invalid %s family index %i (device has %i families)!\n", \
1496  type, ctx_qf, queue_num); \
1497  return AVERROR(EINVAL); \
1498  } \
1499  \
1500  av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i (queues: %i)" \
1501  " for%s%s%s%s%s\n", \
1502  ctx_qf, qc, \
1503  ctx_qf == graph_index ? " graphics" : "", \
1504  ctx_qf == comp_index ? " compute" : "", \
1505  ctx_qf == tx_index ? " transfers" : "", \
1506  ctx_qf == enc_index ? " encode" : "", \
1507  ctx_qf == dec_index ? " decode" : ""); \
1508  graph_index = (ctx_qf == graph_index) ? -1 : graph_index; \
1509  comp_index = (ctx_qf == comp_index) ? -1 : comp_index; \
1510  tx_index = (ctx_qf == tx_index) ? -1 : tx_index; \
1511  enc_index = (ctx_qf == enc_index) ? -1 : enc_index; \
1512  dec_index = (ctx_qf == dec_index) ? -1 : dec_index; \
1513  p->qfs[p->num_qfs++] = ctx_qf; \
1514  } while (0)
1515 
1516  CHECK_QUEUE("graphics", 0, graph_index, hwctx->queue_family_index, hwctx->nb_graphics_queues);
1517  CHECK_QUEUE("upload", 1, tx_index, hwctx->queue_family_tx_index, hwctx->nb_tx_queues);
1518  CHECK_QUEUE("compute", 1, comp_index, hwctx->queue_family_comp_index, hwctx->nb_comp_queues);
1519  CHECK_QUEUE("encode", 0, enc_index, hwctx->queue_family_encode_index, hwctx->nb_encode_queues);
1520  CHECK_QUEUE("decode", 0, dec_index, hwctx->queue_family_decode_index, hwctx->nb_decode_queues);
1521 
1522 #undef CHECK_QUEUE
1523 
1524  /* Get device capabilities */
1525  vk->GetPhysicalDeviceMemoryProperties(hwctx->phys_dev, &p->mprops);
1526 
1527  return 0;
1528 }
1529 
1530 static int vulkan_device_create(AVHWDeviceContext *ctx, const char *device,
1531  AVDictionary *opts, int flags)
1532 {
1533  VulkanDeviceSelection dev_select = { 0 };
1534  if (device && device[0]) {
1535  char *end = NULL;
1536  dev_select.index = strtol(device, &end, 10);
1537  if (end == device) {
1538  dev_select.index = 0;
1539  dev_select.name = device;
1540  }
1541  }
1542 
1543  return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
1544 }
1545 
1547  AVHWDeviceContext *src_ctx,
1548  AVDictionary *opts, int flags)
1549 {
1550  av_unused VulkanDeviceSelection dev_select = { 0 };
1551 
1552  /* If there's only one device on the system, then even if its not covered
1553  * by the following checks (e.g. non-PCIe ARM GPU), having an empty
1554  * dev_select will mean it'll get picked. */
1555  switch(src_ctx->type) {
1556 #if CONFIG_LIBDRM
1557 #if CONFIG_VAAPI
1558  case AV_HWDEVICE_TYPE_VAAPI: {
1559  AVVAAPIDeviceContext *src_hwctx = src_ctx->hwctx;
1560 
1561  const char *vendor = vaQueryVendorString(src_hwctx->display);
1562  if (!vendor) {
1563  av_log(ctx, AV_LOG_ERROR, "Unable to get device info from VAAPI!\n");
1564  return AVERROR_EXTERNAL;
1565  }
1566 
1567  if (strstr(vendor, "Intel"))
1568  dev_select.vendor_id = 0x8086;
1569  if (strstr(vendor, "AMD"))
1570  dev_select.vendor_id = 0x1002;
1571 
1572  return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
1573  }
1574 #endif
1575  case AV_HWDEVICE_TYPE_DRM: {
1576  AVDRMDeviceContext *src_hwctx = src_ctx->hwctx;
1577 
1578  drmDevice *drm_dev_info;
1579  int err = drmGetDevice(src_hwctx->fd, &drm_dev_info);
1580  if (err) {
1581  av_log(ctx, AV_LOG_ERROR, "Unable to get device info from DRM fd!\n");
1582  return AVERROR_EXTERNAL;
1583  }
1584 
1585  if (drm_dev_info->bustype == DRM_BUS_PCI)
1586  dev_select.pci_device = drm_dev_info->deviceinfo.pci->device_id;
1587 
1588  drmFreeDevice(&drm_dev_info);
1589 
1590  return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
1591  }
1592 #endif
1593 #if CONFIG_CUDA
1594  case AV_HWDEVICE_TYPE_CUDA: {
1595  AVHWDeviceContext *cuda_cu = src_ctx;
1596  AVCUDADeviceContext *src_hwctx = src_ctx->hwctx;
1597  AVCUDADeviceContextInternal *cu_internal = src_hwctx->internal;
1598  CudaFunctions *cu = cu_internal->cuda_dl;
1599 
1600  int ret = CHECK_CU(cu->cuDeviceGetUuid((CUuuid *)&dev_select.uuid,
1601  cu_internal->cuda_device));
1602  if (ret < 0) {
1603  av_log(ctx, AV_LOG_ERROR, "Unable to get UUID from CUDA!\n");
1604  return AVERROR_EXTERNAL;
1605  }
1606 
1607  dev_select.has_uuid = 1;
1608 
1609  return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
1610  }
1611 #endif
1612  default:
1613  return AVERROR(ENOSYS);
1614  }
1615 }
1616 
1618  const void *hwconfig,
1619  AVHWFramesConstraints *constraints)
1620 {
1621  int count = 0;
1622  VulkanDevicePriv *p = ctx->internal->priv;
1623 
1624  for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
1625  count += pixfmt_is_supported(ctx, i, p->use_linear_images);
1626 
1627 #if CONFIG_CUDA
1628  if (p->dev_is_nvidia)
1629  count++;
1630 #endif
1631 
1632  constraints->valid_sw_formats = av_malloc_array(count + 1,
1633  sizeof(enum AVPixelFormat));
1634  if (!constraints->valid_sw_formats)
1635  return AVERROR(ENOMEM);
1636 
1637  count = 0;
1638  for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
1640  constraints->valid_sw_formats[count++] = i;
1641 
1642 #if CONFIG_CUDA
1643  if (p->dev_is_nvidia)
1644  constraints->valid_sw_formats[count++] = AV_PIX_FMT_CUDA;
1645 #endif
1646  constraints->valid_sw_formats[count++] = AV_PIX_FMT_NONE;
1647 
1648  constraints->min_width = 0;
1649  constraints->min_height = 0;
1650  constraints->max_width = p->props.properties.limits.maxImageDimension2D;
1651  constraints->max_height = p->props.properties.limits.maxImageDimension2D;
1652 
1653  constraints->valid_hw_formats = av_malloc_array(2, sizeof(enum AVPixelFormat));
1654  if (!constraints->valid_hw_formats)
1655  return AVERROR(ENOMEM);
1656 
1657  constraints->valid_hw_formats[0] = AV_PIX_FMT_VULKAN;
1658  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1659 
1660  return 0;
1661 }
1662 
1663 static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req,
1664  VkMemoryPropertyFlagBits req_flags, const void *alloc_extension,
1665  VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
1666 {
1667  VkResult ret;
1668  int index = -1;
1669  VulkanDevicePriv *p = ctx->internal->priv;
1670  FFVulkanFunctions *vk = &p->vkfn;
1671  AVVulkanDeviceContext *dev_hwctx = ctx->hwctx;
1672  VkMemoryAllocateInfo alloc_info = {
1673  .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
1674  .pNext = alloc_extension,
1675  .allocationSize = req->size,
1676  };
1677 
1678  /* The vulkan spec requires memory types to be sorted in the "optimal"
1679  * order, so the first matching type we find will be the best/fastest one */
1680  for (int i = 0; i < p->mprops.memoryTypeCount; i++) {
1681  const VkMemoryType *type = &p->mprops.memoryTypes[i];
1682 
1683  /* The memory type must be supported by the requirements (bitfield) */
1684  if (!(req->memoryTypeBits & (1 << i)))
1685  continue;
1686 
1687  /* The memory type flags must include our properties */
1688  if ((type->propertyFlags & req_flags) != req_flags)
1689  continue;
1690 
1691  /* The memory type must be large enough */
1692  if (req->size > p->mprops.memoryHeaps[type->heapIndex].size)
1693  continue;
1694 
1695  /* Found a suitable memory type */
1696  index = i;
1697  break;
1698  }
1699 
1700  if (index < 0) {
1701  av_log(ctx, AV_LOG_ERROR, "No memory type found for flags 0x%x\n",
1702  req_flags);
1703  return AVERROR(EINVAL);
1704  }
1705 
1706  alloc_info.memoryTypeIndex = index;
1707 
1708  ret = vk->AllocateMemory(dev_hwctx->act_dev, &alloc_info,
1709  dev_hwctx->alloc, mem);
1710  if (ret != VK_SUCCESS) {
1711  av_log(ctx, AV_LOG_ERROR, "Failed to allocate memory: %s\n",
1712  vk_ret2str(ret));
1713  return AVERROR(ENOMEM);
1714  }
1715 
1716  *mem_flags |= p->mprops.memoryTypes[index].propertyFlags;
1717 
1718  return 0;
1719 }
1720 
1722 {
1723  AVVkFrameInternal *internal = f->internal;
1724 
1725  if (!internal)
1726  return;
1727 
1728 #if CONFIG_CUDA
1729  if (internal->cuda_fc_ref) {
1730  AVHWFramesContext *cuda_fc = (AVHWFramesContext *)internal->cuda_fc_ref->data;
1731  int planes = av_pix_fmt_count_planes(cuda_fc->sw_format);
1732  AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
1733  AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
1734  AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
1735  CudaFunctions *cu = cu_internal->cuda_dl;
1736 
1737  for (int i = 0; i < planes; i++) {
1738  if (internal->cu_sem[i])
1739  CHECK_CU(cu->cuDestroyExternalSemaphore(internal->cu_sem[i]));
1740  if (internal->cu_mma[i])
1741  CHECK_CU(cu->cuMipmappedArrayDestroy(internal->cu_mma[i]));
1742  if (internal->ext_mem[i])
1743  CHECK_CU(cu->cuDestroyExternalMemory(internal->ext_mem[i]));
1744 #ifdef _WIN32
1745  if (internal->ext_sem_handle[i])
1746  CloseHandle(internal->ext_sem_handle[i]);
1747  if (internal->ext_mem_handle[i])
1748  CloseHandle(internal->ext_mem_handle[i]);
1749 #endif
1750  }
1751 
1752  av_buffer_unref(&internal->cuda_fc_ref);
1753  }
1754 #endif
1755 
1756  av_freep(&f->internal);
1757 }
1758 
1759 static void vulkan_frame_free(void *opaque, uint8_t *data)
1760 {
1761  AVVkFrame *f = (AVVkFrame *)data;
1762  AVHWFramesContext *hwfc = opaque;
1763  AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1765  FFVulkanFunctions *vk = &p->vkfn;
1767 
1768  /* We could use vkWaitSemaphores, but the validation layer seems to have
1769  * issues tracking command buffer execution state on uninit. */
1770  vk->DeviceWaitIdle(hwctx->act_dev);
1771 
1773 
1774  for (int i = 0; i < planes; i++) {
1775  vk->DestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
1776  vk->FreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
1777  vk->DestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
1778  }
1779 
1780  av_free(f);
1781 }
1782 
1784  void *alloc_pnext, size_t alloc_pnext_stride)
1785 {
1786  int err;
1787  VkResult ret;
1788  AVHWDeviceContext *ctx = hwfc->device_ctx;
1789  VulkanDevicePriv *p = ctx->internal->priv;
1790  FFVulkanFunctions *vk = &p->vkfn;
1791  AVVulkanFramesContext *hwfctx = hwfc->hwctx;
1792  const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1793  VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { { 0 } };
1794 
1795  VkMemoryRequirements cont_memory_requirements = { 0 };
1796  int cont_mem_size_list[AV_NUM_DATA_POINTERS] = { 0 };
1797  int cont_mem_size = 0;
1798 
1799  AVVulkanDeviceContext *hwctx = ctx->hwctx;
1800 
1801  for (int i = 0; i < planes; i++) {
1802  int use_ded_mem;
1803  VkImageMemoryRequirementsInfo2 req_desc = {
1804  .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
1805  .image = f->img[i],
1806  };
1807  VkMemoryDedicatedAllocateInfo ded_alloc = {
1808  .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
1809  .pNext = (void *)(((uint8_t *)alloc_pnext) + i*alloc_pnext_stride),
1810  };
1811  VkMemoryDedicatedRequirements ded_req = {
1812  .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
1813  };
1814  VkMemoryRequirements2 req = {
1815  .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
1816  .pNext = &ded_req,
1817  };
1818 
1819  vk->GetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req);
1820 
1821  if (f->tiling == VK_IMAGE_TILING_LINEAR)
1822  req.memoryRequirements.size = FFALIGN(req.memoryRequirements.size,
1823  p->props.properties.limits.minMemoryMapAlignment);
1824 
1825  if (hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY) {
1826  if (ded_req.requiresDedicatedAllocation) {
1827  av_log(hwfc, AV_LOG_ERROR, "Cannot allocate all planes in a single allocation, "
1828  "device requires dedicated image allocation!\n");
1829  return AVERROR(EINVAL);
1830  } else if (!i) {
1831  cont_memory_requirements = req.memoryRequirements;
1832  } else if (cont_memory_requirements.memoryTypeBits !=
1833  req.memoryRequirements.memoryTypeBits) {
1834  av_log(hwfc, AV_LOG_ERROR, "The memory requirements differ between plane 0 "
1835  "and %i, cannot allocate in a single region!\n",
1836  i);
1837  return AVERROR(EINVAL);
1838  }
1839 
1840  cont_mem_size_list[i] = FFALIGN(req.memoryRequirements.size,
1841  req.memoryRequirements.alignment);
1842  cont_mem_size += cont_mem_size_list[i];
1843  continue;
1844  }
1845 
1846  /* In case the implementation prefers/requires dedicated allocation */
1847  use_ded_mem = ded_req.prefersDedicatedAllocation |
1848  ded_req.requiresDedicatedAllocation;
1849  if (use_ded_mem)
1850  ded_alloc.image = f->img[i];
1851 
1852  /* Allocate memory */
1853  if ((err = alloc_mem(ctx, &req.memoryRequirements,
1854  f->tiling == VK_IMAGE_TILING_LINEAR ?
1855  VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
1856  VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
1857  use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
1858  &f->flags, &f->mem[i])))
1859  return err;
1860 
1861  f->size[i] = req.memoryRequirements.size;
1862  bind_info[i].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
1863  bind_info[i].image = f->img[i];
1864  bind_info[i].memory = f->mem[i];
1865  }
1866 
1867  if (hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY) {
1868  cont_memory_requirements.size = cont_mem_size;
1869 
1870  /* Allocate memory */
1871  if ((err = alloc_mem(ctx, &cont_memory_requirements,
1872  f->tiling == VK_IMAGE_TILING_LINEAR ?
1873  VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
1874  VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
1875  (void *)(((uint8_t *)alloc_pnext)),
1876  &f->flags, &f->mem[0])))
1877  return err;
1878 
1879  f->size[0] = cont_memory_requirements.size;
1880 
1881  for (int i = 0, offset = 0; i < planes; i++) {
1882  bind_info[i].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
1883  bind_info[i].image = f->img[i];
1884  bind_info[i].memory = f->mem[0];
1885  bind_info[i].memoryOffset = offset;
1886 
1887  f->offset[i] = bind_info[i].memoryOffset;
1888  offset += cont_mem_size_list[i];
1889  }
1890  }
1891 
1892  /* Bind the allocated memory to the images */
1893  ret = vk->BindImageMemory2(hwctx->act_dev, planes, bind_info);
1894  if (ret != VK_SUCCESS) {
1895  av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
1896  vk_ret2str(ret));
1897  return AVERROR_EXTERNAL;
1898  }
1899 
1900  return 0;
1901 }
1902 
1903 enum PrepMode {
1907 };
1908 
1910  AVVkFrame *frame, enum PrepMode pmode)
1911 {
1912  int err;
1913  uint32_t src_qf, dst_qf;
1914  VkImageLayout new_layout;
1915  VkAccessFlags new_access;
1916  const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1918  FFVulkanFunctions *vk = &p->vkfn;
1919  uint64_t sem_sig_val[AV_NUM_DATA_POINTERS];
1920 
1921  VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
1922 
1923  VkTimelineSemaphoreSubmitInfo s_timeline_sem_info = {
1924  .sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
1925  .pSignalSemaphoreValues = sem_sig_val,
1926  .signalSemaphoreValueCount = planes,
1927  };
1928 
1929  VkSubmitInfo s_info = {
1930  .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
1931  .pNext = &s_timeline_sem_info,
1932  .pSignalSemaphores = frame->sem,
1933  .signalSemaphoreCount = planes,
1934  };
1935 
1936  VkPipelineStageFlagBits wait_st[AV_NUM_DATA_POINTERS];
1937  for (int i = 0; i < planes; i++) {
1938  wait_st[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
1939  sem_sig_val[i] = frame->sem_value[i] + 1;
1940  }
1941 
1942  switch (pmode) {
1943  case PREP_MODE_WRITE:
1944  new_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1945  new_access = VK_ACCESS_TRANSFER_WRITE_BIT;
1946  src_qf = VK_QUEUE_FAMILY_IGNORED;
1947  dst_qf = VK_QUEUE_FAMILY_IGNORED;
1948  break;
1950  new_layout = VK_IMAGE_LAYOUT_GENERAL;
1951  new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
1952  src_qf = VK_QUEUE_FAMILY_EXTERNAL_KHR;
1953  dst_qf = VK_QUEUE_FAMILY_IGNORED;
1954  s_timeline_sem_info.pWaitSemaphoreValues = frame->sem_value;
1955  s_timeline_sem_info.waitSemaphoreValueCount = planes;
1956  s_info.pWaitSemaphores = frame->sem;
1957  s_info.pWaitDstStageMask = wait_st;
1958  s_info.waitSemaphoreCount = planes;
1959  break;
1961  new_layout = VK_IMAGE_LAYOUT_GENERAL;
1962  new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
1963  src_qf = VK_QUEUE_FAMILY_IGNORED;
1964  dst_qf = VK_QUEUE_FAMILY_EXTERNAL_KHR;
1965  s_timeline_sem_info.pWaitSemaphoreValues = frame->sem_value;
1966  s_timeline_sem_info.waitSemaphoreValueCount = planes;
1967  s_info.pWaitSemaphores = frame->sem;
1968  s_info.pWaitDstStageMask = wait_st;
1969  s_info.waitSemaphoreCount = planes;
1970  break;
1971  }
1972 
1973  if ((err = wait_start_exec_ctx(hwfc, ectx)))
1974  return err;
1975 
1976  /* Change the image layout to something more optimal for writes.
1977  * This also signals the newly created semaphore, making it usable
1978  * for synchronization */
1979  for (int i = 0; i < planes; i++) {
1980  img_bar[i].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
1981  img_bar[i].srcAccessMask = 0x0;
1982  img_bar[i].dstAccessMask = new_access;
1983  img_bar[i].oldLayout = frame->layout[i];
1984  img_bar[i].newLayout = new_layout;
1985  img_bar[i].srcQueueFamilyIndex = src_qf;
1986  img_bar[i].dstQueueFamilyIndex = dst_qf;
1987  img_bar[i].image = frame->img[i];
1988  img_bar[i].subresourceRange.levelCount = 1;
1989  img_bar[i].subresourceRange.layerCount = 1;
1990  img_bar[i].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1991 
1992  frame->layout[i] = img_bar[i].newLayout;
1993  frame->access[i] = img_bar[i].dstAccessMask;
1994  }
1995 
1996  vk->CmdPipelineBarrier(get_buf_exec_ctx(hwfc, ectx),
1997  VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
1998  VK_PIPELINE_STAGE_TRANSFER_BIT,
1999  0, 0, NULL, 0, NULL, planes, img_bar);
2000 
2001  return submit_exec_ctx(hwfc, ectx, &s_info, frame, 0);
2002 }
2003 
2004 static inline void get_plane_wh(int *w, int *h, enum AVPixelFormat format,
2005  int frame_w, int frame_h, int plane)
2006 {
2008 
2009  /* Currently always true unless gray + alpha support is added */
2010  if (!plane || (plane == 3) || desc->flags & AV_PIX_FMT_FLAG_RGB ||
2011  !(desc->flags & AV_PIX_FMT_FLAG_PLANAR)) {
2012  *w = frame_w;
2013  *h = frame_h;
2014  return;
2015  }
2016 
2017  *w = AV_CEIL_RSHIFT(frame_w, desc->log2_chroma_w);
2018  *h = AV_CEIL_RSHIFT(frame_h, desc->log2_chroma_h);
2019 }
2020 
2022  VkImageTiling tiling, VkImageUsageFlagBits usage,
2023  void *create_pnext)
2024 {
2025  int err;
2026  VkResult ret;
2027  AVHWDeviceContext *ctx = hwfc->device_ctx;
2028  VulkanDevicePriv *p = ctx->internal->priv;
2029  FFVulkanFunctions *vk = &p->vkfn;
2030  AVVulkanDeviceContext *hwctx = ctx->hwctx;
2031  enum AVPixelFormat format = hwfc->sw_format;
2032  const VkFormat *img_fmts = av_vkfmt_from_pixfmt(format);
2033  const int planes = av_pix_fmt_count_planes(format);
2034 
2035  VkExportSemaphoreCreateInfo ext_sem_info = {
2036  .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
2037 #ifdef _WIN32
2038  .handleTypes = IsWindows8OrGreater()
2039  ? VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT
2040  : VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
2041 #else
2042  .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
2043 #endif
2044  };
2045 
2046  VkSemaphoreTypeCreateInfo sem_type_info = {
2047  .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
2048 #ifdef _WIN32
2049  .pNext = p->extensions & FF_VK_EXT_EXTERNAL_WIN32_SEM ? &ext_sem_info : NULL,
2050 #else
2051  .pNext = p->extensions & FF_VK_EXT_EXTERNAL_FD_SEM ? &ext_sem_info : NULL,
2052 #endif
2053  .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
2054  .initialValue = 0,
2055  };
2056 
2057  VkSemaphoreCreateInfo sem_spawn = {
2058  .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
2059  .pNext = &sem_type_info,
2060  };
2061 
2063  if (!f) {
2064  av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
2065  return AVERROR(ENOMEM);
2066  }
2067 
2068  /* Create the images */
2069  for (int i = 0; i < planes; i++) {
2070  VkImageCreateInfo create_info = {
2071  .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2072  .pNext = create_pnext,
2073  .imageType = VK_IMAGE_TYPE_2D,
2074  .format = img_fmts[i],
2075  .extent.depth = 1,
2076  .mipLevels = 1,
2077  .arrayLayers = 1,
2078  .flags = VK_IMAGE_CREATE_ALIAS_BIT,
2079  .tiling = tiling,
2080  .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
2081  .usage = usage,
2082  .samples = VK_SAMPLE_COUNT_1_BIT,
2083  .pQueueFamilyIndices = p->qfs,
2084  .queueFamilyIndexCount = p->num_qfs,
2085  .sharingMode = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
2086  VK_SHARING_MODE_EXCLUSIVE,
2087  };
2088 
2089  get_plane_wh(&create_info.extent.width, &create_info.extent.height,
2090  format, hwfc->width, hwfc->height, i);
2091 
2092  ret = vk->CreateImage(hwctx->act_dev, &create_info,
2093  hwctx->alloc, &f->img[i]);
2094  if (ret != VK_SUCCESS) {
2095  av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
2096  vk_ret2str(ret));
2097  err = AVERROR(EINVAL);
2098  goto fail;
2099  }
2100 
2101  /* Create semaphore */
2102  ret = vk->CreateSemaphore(hwctx->act_dev, &sem_spawn,
2103  hwctx->alloc, &f->sem[i]);
2104  if (ret != VK_SUCCESS) {
2105  av_log(hwctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
2106  vk_ret2str(ret));
2107  return AVERROR_EXTERNAL;
2108  }
2109 
2110  f->layout[i] = create_info.initialLayout;
2111  f->access[i] = 0x0;
2112  f->sem_value[i] = 0;
2113  }
2114 
2115  f->flags = 0x0;
2116  f->tiling = tiling;
2117 
2118  *frame = f;
2119  return 0;
2120 
2121 fail:
2122  vulkan_frame_free(hwfc, (uint8_t *)f);
2123  return err;
2124 }
2125 
2126 /* Checks if an export flag is enabled, and if it is ORs it with *iexp */
2128  VkExternalMemoryHandleTypeFlags *comp_handle_types,
2129  VkExternalMemoryHandleTypeFlagBits *iexp,
2130  VkExternalMemoryHandleTypeFlagBits exp)
2131 {
2132  VkResult ret;
2133  AVVulkanFramesContext *hwctx = hwfc->hwctx;
2134  AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
2136  FFVulkanFunctions *vk = &p->vkfn;
2137 
2138  const VkImageDrmFormatModifierListCreateInfoEXT *drm_mod_info =
2140  VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT);
2141  int has_mods = hwctx->tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT && drm_mod_info;
2142  int nb_mods;
2143 
2144  VkExternalImageFormatProperties eprops = {
2145  .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
2146  };
2147  VkImageFormatProperties2 props = {
2148  .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
2149  .pNext = &eprops,
2150  };
2151  VkPhysicalDeviceImageDrmFormatModifierInfoEXT phy_dev_mod_info = {
2152  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
2153  .pNext = NULL,
2154  .pQueueFamilyIndices = p->qfs,
2155  .queueFamilyIndexCount = p->num_qfs,
2156  .sharingMode = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
2157  VK_SHARING_MODE_EXCLUSIVE,
2158  };
2159  VkPhysicalDeviceExternalImageFormatInfo enext = {
2160  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
2161  .handleType = exp,
2162  .pNext = has_mods ? &phy_dev_mod_info : NULL,
2163  };
2164  VkPhysicalDeviceImageFormatInfo2 pinfo = {
2165  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
2166  .pNext = !exp ? NULL : &enext,
2167  .format = av_vkfmt_from_pixfmt(hwfc->sw_format)[0],
2168  .type = VK_IMAGE_TYPE_2D,
2169  .tiling = hwctx->tiling,
2170  .usage = hwctx->usage,
2171  .flags = VK_IMAGE_CREATE_ALIAS_BIT,
2172  };
2173 
2174  nb_mods = has_mods ? drm_mod_info->drmFormatModifierCount : 1;
2175  for (int i = 0; i < nb_mods; i++) {
2176  if (has_mods)
2177  phy_dev_mod_info.drmFormatModifier = drm_mod_info->pDrmFormatModifiers[i];
2178 
2179  ret = vk->GetPhysicalDeviceImageFormatProperties2(dev_hwctx->phys_dev,
2180  &pinfo, &props);
2181 
2182  if (ret == VK_SUCCESS) {
2183  *iexp |= exp;
2184  *comp_handle_types |= eprops.externalMemoryProperties.compatibleHandleTypes;
2185  }
2186  }
2187 }
2188 
2189 static AVBufferRef *vulkan_pool_alloc(void *opaque, size_t size)
2190 {
2191  int err;
2192  AVVkFrame *f;
2193  AVBufferRef *avbuf = NULL;
2194  AVHWFramesContext *hwfc = opaque;
2195  AVVulkanFramesContext *hwctx = hwfc->hwctx;
2197  VulkanFramesPriv *fp = hwfc->internal->priv;
2198  VkExportMemoryAllocateInfo eminfo[AV_NUM_DATA_POINTERS];
2199  VkExternalMemoryHandleTypeFlags e = 0x0;
2200 
2201  VkExternalMemoryImageCreateInfo eiinfo = {
2202  .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
2203  .pNext = hwctx->create_pnext,
2204  };
2205 
2206 #ifdef _WIN32
2207  if (p->extensions & FF_VK_EXT_EXTERNAL_WIN32_MEMORY)
2208  try_export_flags(hwfc, &eiinfo.handleTypes, &e, IsWindows8OrGreater()
2209  ? VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT
2210  : VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT);
2211 #else
2213  try_export_flags(hwfc, &eiinfo.handleTypes, &e,
2214  VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
2215 
2217  try_export_flags(hwfc, &eiinfo.handleTypes, &e,
2218  VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2219 #endif
2220 
2221  for (int i = 0; i < av_pix_fmt_count_planes(hwfc->sw_format); i++) {
2222  eminfo[i].sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
2223  eminfo[i].pNext = hwctx->alloc_pnext[i];
2224  eminfo[i].handleTypes = e;
2225  }
2226 
2227  err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
2228  eiinfo.handleTypes ? &eiinfo : NULL);
2229  if (err)
2230  return NULL;
2231 
2232  err = alloc_bind_mem(hwfc, f, eminfo, sizeof(*eminfo));
2233  if (err)
2234  goto fail;
2235 
2236  err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_WRITE);
2237  if (err)
2238  goto fail;
2239 
2240  avbuf = av_buffer_create((uint8_t *)f, sizeof(AVVkFrame),
2241  vulkan_frame_free, hwfc, 0);
2242  if (!avbuf)
2243  goto fail;
2244 
2245  return avbuf;
2246 
2247 fail:
2248  vulkan_frame_free(hwfc, (uint8_t *)f);
2249  return NULL;
2250 }
2251 
2253 {
2254  VulkanFramesPriv *fp = hwfc->internal->priv;
2255 
2256  if (fp->modifier_info) {
2257  if (fp->modifier_info->pDrmFormatModifiers)
2258  av_freep(&fp->modifier_info->pDrmFormatModifiers);
2259  av_freep(&fp->modifier_info);
2260  }
2261 
2262  free_exec_ctx(hwfc, &fp->conv_ctx);
2263  free_exec_ctx(hwfc, &fp->upload_ctx);
2264  free_exec_ctx(hwfc, &fp->download_ctx);
2265 }
2266 
2268 {
2269  int err;
2270  AVVkFrame *f;
2271  AVVulkanFramesContext *hwctx = hwfc->hwctx;
2272  VulkanFramesPriv *fp = hwfc->internal->priv;
2273  AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
2275  const VkImageDrmFormatModifierListCreateInfoEXT *modifier_info;
2276  const int has_modifiers = !!(p->extensions & FF_VK_EXT_DRM_MODIFIER_FLAGS);
2277 
2278  /* Default tiling flags */
2279  hwctx->tiling = hwctx->tiling ? hwctx->tiling :
2280  has_modifiers ? VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
2281  p->use_linear_images ? VK_IMAGE_TILING_LINEAR :
2282  VK_IMAGE_TILING_OPTIMAL;
2283 
2284  if (!hwctx->usage)
2286 
2287  if (!(hwctx->flags & AV_VK_FRAME_FLAG_NONE)) {
2288  if (p->contiguous_planes == 1 ||
2289  ((p->contiguous_planes == -1) && p->dev_is_intel))
2291  }
2292 
2293  modifier_info = vk_find_struct(hwctx->create_pnext,
2294  VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT);
2295 
2296  /* Get the supported modifiers if the user has not given any. */
2297  if (has_modifiers && !modifier_info) {
2298  const VkFormat *fmt = av_vkfmt_from_pixfmt(hwfc->sw_format);
2299  VkImageDrmFormatModifierListCreateInfoEXT *modifier_info;
2300  FFVulkanFunctions *vk = &p->vkfn;
2301  VkDrmFormatModifierPropertiesEXT *mod_props;
2302  uint64_t *modifiers;
2303  int modifier_count = 0;
2304 
2305  VkDrmFormatModifierPropertiesListEXT mod_props_list = {
2306  .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
2307  .pNext = NULL,
2308  .drmFormatModifierCount = 0,
2309  .pDrmFormatModifierProperties = NULL,
2310  };
2311  VkFormatProperties2 prop = {
2312  .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
2313  .pNext = &mod_props_list,
2314  };
2315 
2316  /* Get all supported modifiers */
2317  vk->GetPhysicalDeviceFormatProperties2(dev_hwctx->phys_dev, fmt[0], &prop);
2318 
2319  if (!mod_props_list.drmFormatModifierCount) {
2320  av_log(hwfc, AV_LOG_ERROR, "There are no supported modifiers for the given sw_format\n");
2321  return AVERROR(EINVAL);
2322  }
2323 
2324  /* Createa structure to hold the modifier list info */
2325  modifier_info = av_mallocz(sizeof(*modifier_info));
2326  if (!modifier_info)
2327  return AVERROR(ENOMEM);
2328 
2329  modifier_info->pNext = NULL;
2330  modifier_info->sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;
2331 
2332  /* Add structure to the image creation pNext chain */
2333  if (!hwctx->create_pnext)
2334  hwctx->create_pnext = modifier_info;
2335  else
2336  vk_link_struct(hwctx->create_pnext, (void *)modifier_info);
2337 
2338  /* Backup the allocated struct to be freed later */
2339  fp->modifier_info = modifier_info;
2340 
2341  /* Allocate list of modifiers */
2342  modifiers = av_mallocz(mod_props_list.drmFormatModifierCount *
2343  sizeof(*modifiers));
2344  if (!modifiers)
2345  return AVERROR(ENOMEM);
2346 
2347  modifier_info->pDrmFormatModifiers = modifiers;
2348 
2349  /* Allocate a temporary list to hold all modifiers supported */
2350  mod_props = av_mallocz(mod_props_list.drmFormatModifierCount *
2351  sizeof(*mod_props));
2352  if (!mod_props)
2353  return AVERROR(ENOMEM);
2354 
2355  mod_props_list.pDrmFormatModifierProperties = mod_props;
2356 
2357  /* Finally get all modifiers from the device */
2358  vk->GetPhysicalDeviceFormatProperties2(dev_hwctx->phys_dev, fmt[0], &prop);
2359 
2360  /* Reject any modifiers that don't match our requirements */
2361  for (int i = 0; i < mod_props_list.drmFormatModifierCount; i++) {
2362  if (!(mod_props[i].drmFormatModifierTilingFeatures & hwctx->usage))
2363  continue;
2364 
2365  modifiers[modifier_count++] = mod_props[i].drmFormatModifier;
2366  }
2367 
2368  if (!modifier_count) {
2369  av_log(hwfc, AV_LOG_ERROR, "None of the given modifiers supports"
2370  " the usage flags!\n");
2371  av_freep(&mod_props);
2372  return AVERROR(EINVAL);
2373  }
2374 
2375  modifier_info->drmFormatModifierCount = modifier_count;
2376  av_freep(&mod_props);
2377  }
2378 
2379  err = create_exec_ctx(hwfc, &fp->conv_ctx,
2380  dev_hwctx->queue_family_comp_index,
2381  dev_hwctx->nb_comp_queues);
2382  if (err)
2383  return err;
2384 
2385  err = create_exec_ctx(hwfc, &fp->upload_ctx,
2386  dev_hwctx->queue_family_tx_index,
2387  dev_hwctx->nb_tx_queues);
2388  if (err)
2389  return err;
2390 
2391  err = create_exec_ctx(hwfc, &fp->download_ctx,
2392  dev_hwctx->queue_family_tx_index, 1);
2393  if (err)
2394  return err;
2395 
2396  /* Test to see if allocation will fail */
2397  err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
2398  hwctx->create_pnext);
2399  if (err)
2400  return err;
2401 
2402  vulkan_frame_free(hwfc, (uint8_t *)f);
2403 
2404  /* If user did not specify a pool, hwfc->pool will be set to the internal one
2405  * in hwcontext.c just after this gets called */
2406  if (!hwfc->pool) {
2408  hwfc, vulkan_pool_alloc,
2409  NULL);
2410  if (!hwfc->internal->pool_internal)
2411  return AVERROR(ENOMEM);
2412  }
2413 
2414  return 0;
2415 }
2416 
2418 {
2419  frame->buf[0] = av_buffer_pool_get(hwfc->pool);
2420  if (!frame->buf[0])
2421  return AVERROR(ENOMEM);
2422 
2423  frame->data[0] = frame->buf[0]->data;
2424  frame->format = AV_PIX_FMT_VULKAN;
2425  frame->width = hwfc->width;
2426  frame->height = hwfc->height;
2427 
2428  return 0;
2429 }
2430 
2432  enum AVHWFrameTransferDirection dir,
2433  enum AVPixelFormat **formats)
2434 {
2435  enum AVPixelFormat *fmts = av_malloc_array(2, sizeof(*fmts));
2436  if (!fmts)
2437  return AVERROR(ENOMEM);
2438 
2439  fmts[0] = hwfc->sw_format;
2440  fmts[1] = AV_PIX_FMT_NONE;
2441 
2442  *formats = fmts;
2443  return 0;
2444 }
2445 
2446 typedef struct VulkanMapping {
2448  int flags;
2449 } VulkanMapping;
2450 
2452 {
2453  VulkanMapping *map = hwmap->priv;
2454  AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
2455  const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2457  FFVulkanFunctions *vk = &p->vkfn;
2458 
2459  /* Check if buffer needs flushing */
2460  if ((map->flags & AV_HWFRAME_MAP_WRITE) &&
2461  !(map->frame->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
2462  VkResult ret;
2463  VkMappedMemoryRange flush_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
2464 
2465  for (int i = 0; i < planes; i++) {
2466  flush_ranges[i].sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
2467  flush_ranges[i].memory = map->frame->mem[i];
2468  flush_ranges[i].size = VK_WHOLE_SIZE;
2469  }
2470 
2471  ret = vk->FlushMappedMemoryRanges(hwctx->act_dev, planes,
2472  flush_ranges);
2473  if (ret != VK_SUCCESS) {
2474  av_log(hwfc, AV_LOG_ERROR, "Failed to flush memory: %s\n",
2475  vk_ret2str(ret));
2476  }
2477  }
2478 
2479  for (int i = 0; i < planes; i++)
2480  vk->UnmapMemory(hwctx->act_dev, map->frame->mem[i]);
2481 
2482  av_free(map);
2483 }
2484 
2486  const AVFrame *src, int flags)
2487 {
2488  VkResult ret;
2489  int err, mapped_mem_count = 0, mem_planes = 0;
2490  AVVkFrame *f = (AVVkFrame *)src->data[0];
2491  AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
2492  AVVulkanFramesContext *hwfctx = hwfc->hwctx;
2493  const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2495  FFVulkanFunctions *vk = &p->vkfn;
2496 
2498  if (!map)
2499  return AVERROR(EINVAL);
2500 
2501  if (src->format != AV_PIX_FMT_VULKAN) {
2502  av_log(hwfc, AV_LOG_ERROR, "Cannot map from pixel format %s!\n",
2503  av_get_pix_fmt_name(src->format));
2504  err = AVERROR(EINVAL);
2505  goto fail;
2506  }
2507 
2508  if (!(f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ||
2509  !(f->tiling == VK_IMAGE_TILING_LINEAR)) {
2510  av_log(hwfc, AV_LOG_ERROR, "Unable to map frame, not host visible "
2511  "and linear!\n");
2512  err = AVERROR(EINVAL);
2513  goto fail;
2514  }
2515 
2516  dst->width = src->width;
2517  dst->height = src->height;
2518 
2519  mem_planes = hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY ? 1 : planes;
2520  for (int i = 0; i < mem_planes; i++) {
2521  ret = vk->MapMemory(hwctx->act_dev, f->mem[i], 0,
2522  VK_WHOLE_SIZE, 0, (void **)&dst->data[i]);
2523  if (ret != VK_SUCCESS) {
2524  av_log(hwfc, AV_LOG_ERROR, "Failed to map image memory: %s\n",
2525  vk_ret2str(ret));
2526  err = AVERROR_EXTERNAL;
2527  goto fail;
2528  }
2529  mapped_mem_count++;
2530  }
2531 
2532  if (hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY) {
2533  for (int i = 0; i < planes; i++)
2534  dst->data[i] = dst->data[0] + f->offset[i];
2535  }
2536 
2537  /* Check if the memory contents matter */
2539  !(f->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
2540  VkMappedMemoryRange map_mem_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
2541  for (int i = 0; i < planes; i++) {
2542  map_mem_ranges[i].sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
2543  map_mem_ranges[i].size = VK_WHOLE_SIZE;
2544  map_mem_ranges[i].memory = f->mem[i];
2545  }
2546 
2547  ret = vk->InvalidateMappedMemoryRanges(hwctx->act_dev, planes,
2548  map_mem_ranges);
2549  if (ret != VK_SUCCESS) {
2550  av_log(hwfc, AV_LOG_ERROR, "Failed to invalidate memory: %s\n",
2551  vk_ret2str(ret));
2552  err = AVERROR_EXTERNAL;
2553  goto fail;
2554  }
2555  }
2556 
2557  for (int i = 0; i < planes; i++) {
2558  VkImageSubresource sub = {
2559  .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
2560  };
2561  VkSubresourceLayout layout;
2562  vk->GetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
2563  dst->linesize[i] = layout.rowPitch;
2564  }
2565 
2566  map->frame = f;
2567  map->flags = flags;
2568 
2569  err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
2571  if (err < 0)
2572  goto fail;
2573 
2574  return 0;
2575 
2576 fail:
2577  for (int i = 0; i < mapped_mem_count; i++)
2578  vk->UnmapMemory(hwctx->act_dev, f->mem[i]);
2579 
2580  av_free(map);
2581  return err;
2582 }
2583 
2584 #if CONFIG_LIBDRM
2585 static void vulkan_unmap_from_drm(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
2586 {
2587  AVVkFrame *f = hwmap->priv;
2588  AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
2589  const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2591  FFVulkanFunctions *vk = &p->vkfn;
2592 
2593  VkSemaphoreWaitInfo wait_info = {
2594  .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
2595  .flags = 0x0,
2596  .pSemaphores = f->sem,
2597  .pValues = f->sem_value,
2598  .semaphoreCount = planes,
2599  };
2600 
2601  vk->WaitSemaphores(hwctx->act_dev, &wait_info, UINT64_MAX);
2602 
2604 
2605  for (int i = 0; i < planes; i++) {
2606  vk->DestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
2607  vk->FreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
2608  vk->DestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
2609  }
2610 
2611  av_free(f);
2612 }
2613 
2614 static const struct {
2615  uint32_t drm_fourcc;
2616  VkFormat vk_format;
2617 } vulkan_drm_format_map[] = {
2618  { DRM_FORMAT_R8, VK_FORMAT_R8_UNORM },
2619  { DRM_FORMAT_R16, VK_FORMAT_R16_UNORM },
2620  { DRM_FORMAT_GR88, VK_FORMAT_R8G8_UNORM },
2621  { DRM_FORMAT_RG88, VK_FORMAT_R8G8_UNORM },
2622  { DRM_FORMAT_GR1616, VK_FORMAT_R16G16_UNORM },
2623  { DRM_FORMAT_RG1616, VK_FORMAT_R16G16_UNORM },
2624  { DRM_FORMAT_ARGB8888, VK_FORMAT_B8G8R8A8_UNORM },
2625  { DRM_FORMAT_XRGB8888, VK_FORMAT_B8G8R8A8_UNORM },
2626  { DRM_FORMAT_ABGR8888, VK_FORMAT_R8G8B8A8_UNORM },
2627  { DRM_FORMAT_XBGR8888, VK_FORMAT_R8G8B8A8_UNORM },
2628 
2629  // All these DRM_FORMATs were added in the same libdrm commit.
2630 #ifdef DRM_FORMAT_XYUV8888
2631  { DRM_FORMAT_XYUV8888, VK_FORMAT_R8G8B8A8_UNORM },
2632  { DRM_FORMAT_XVYU12_16161616, VK_FORMAT_R16G16B16A16_UNORM} ,
2633  // As we had to map XV36 to a 16bit Vulkan format, reverse mapping will
2634  // end up yielding Y416 as the DRM format, so we need to recognise it.
2635  { DRM_FORMAT_Y416, VK_FORMAT_R16G16B16A16_UNORM },
2636 #endif
2637 };
2638 
2639 static inline VkFormat drm_to_vulkan_fmt(uint32_t drm_fourcc)
2640 {
2641  for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
2642  if (vulkan_drm_format_map[i].drm_fourcc == drm_fourcc)
2643  return vulkan_drm_format_map[i].vk_format;
2644  return VK_FORMAT_UNDEFINED;
2645 }
2646 
2647 static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **frame,
2648  const AVFrame *src)
2649 {
2650  int err = 0;
2651  VkResult ret;
2652  AVVkFrame *f;
2653  int bind_counts = 0;
2654  AVHWDeviceContext *ctx = hwfc->device_ctx;
2655  AVVulkanDeviceContext *hwctx = ctx->hwctx;
2656  VulkanDevicePriv *p = ctx->internal->priv;
2657  FFVulkanFunctions *vk = &p->vkfn;
2658  VulkanFramesPriv *fp = hwfc->internal->priv;
2659  const AVDRMFrameDescriptor *desc = (AVDRMFrameDescriptor *)src->data[0];
2660  VkBindImageMemoryInfo bind_info[AV_DRM_MAX_PLANES];
2661  VkBindImagePlaneMemoryInfo plane_info[AV_DRM_MAX_PLANES];
2662 
2663  for (int i = 0; i < desc->nb_layers; i++) {
2664  if (drm_to_vulkan_fmt(desc->layers[i].format) == VK_FORMAT_UNDEFINED) {
2665  av_log(ctx, AV_LOG_ERROR, "Unsupported DMABUF layer format %#08x!\n",
2666  desc->layers[i].format);
2667  return AVERROR(EINVAL);
2668  }
2669  }
2670 
2671  if (!(f = av_vk_frame_alloc())) {
2672  av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
2673  err = AVERROR(ENOMEM);
2674  goto fail;
2675  }
2676 
2677  f->tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
2678 
2679  for (int i = 0; i < desc->nb_layers; i++) {
2680  const int planes = desc->layers[i].nb_planes;
2681 
2682  /* Semaphore */
2683  VkSemaphoreTypeCreateInfo sem_type_info = {
2684  .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
2685  .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
2686  .initialValue = 0,
2687  };
2688  VkSemaphoreCreateInfo sem_spawn = {
2689  .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
2690  .pNext = &sem_type_info,
2691  };
2692 
2693  /* Image creation */
2694  VkSubresourceLayout ext_img_layouts[AV_DRM_MAX_PLANES];
2695  VkImageDrmFormatModifierExplicitCreateInfoEXT ext_img_mod_spec = {
2696  .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
2697  .drmFormatModifier = desc->objects[0].format_modifier,
2698  .drmFormatModifierPlaneCount = planes,
2699  .pPlaneLayouts = (const VkSubresourceLayout *)&ext_img_layouts,
2700  };
2701  VkExternalMemoryImageCreateInfo ext_img_spec = {
2702  .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
2703  .pNext = &ext_img_mod_spec,
2704  .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
2705  };
2706  VkImageCreateInfo create_info = {
2707  .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2708  .pNext = &ext_img_spec,
2709  .imageType = VK_IMAGE_TYPE_2D,
2710  .format = drm_to_vulkan_fmt(desc->layers[i].format),
2711  .extent.depth = 1,
2712  .mipLevels = 1,
2713  .arrayLayers = 1,
2714  .flags = 0x0, /* ALIAS flag is implicit for imported images */
2715  .tiling = f->tiling,
2716  .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED, /* specs say so */
2717  .usage = VK_IMAGE_USAGE_SAMPLED_BIT |
2718  VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2719  .samples = VK_SAMPLE_COUNT_1_BIT,
2720  .pQueueFamilyIndices = p->qfs,
2721  .queueFamilyIndexCount = p->num_qfs,
2722  .sharingMode = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
2723  VK_SHARING_MODE_EXCLUSIVE,
2724  };
2725 
2726  /* Image format verification */
2727  VkExternalImageFormatProperties ext_props = {
2728  .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
2729  };
2730  VkImageFormatProperties2 props_ret = {
2731  .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
2732  .pNext = &ext_props,
2733  };
2734  VkPhysicalDeviceImageDrmFormatModifierInfoEXT props_drm_mod = {
2735  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
2736  .drmFormatModifier = ext_img_mod_spec.drmFormatModifier,
2737  .pQueueFamilyIndices = create_info.pQueueFamilyIndices,
2738  .queueFamilyIndexCount = create_info.queueFamilyIndexCount,
2739  .sharingMode = create_info.sharingMode,
2740  };
2741  VkPhysicalDeviceExternalImageFormatInfo props_ext = {
2742  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
2743  .pNext = &props_drm_mod,
2744  .handleType = ext_img_spec.handleTypes,
2745  };
2746  VkPhysicalDeviceImageFormatInfo2 fmt_props = {
2747  .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
2748  .pNext = &props_ext,
2749  .format = create_info.format,
2750  .type = create_info.imageType,
2751  .tiling = create_info.tiling,
2752  .usage = create_info.usage,
2753  .flags = create_info.flags,
2754  };
2755 
2756  /* Check if importing is possible for this combination of parameters */
2757  ret = vk->GetPhysicalDeviceImageFormatProperties2(hwctx->phys_dev,
2758  &fmt_props, &props_ret);
2759  if (ret != VK_SUCCESS) {
2760  av_log(ctx, AV_LOG_ERROR, "Cannot map DRM frame to Vulkan: %s\n",
2761  vk_ret2str(ret));
2762  err = AVERROR_EXTERNAL;
2763  goto fail;
2764  }
2765 
2766  /* Set the image width/height */
2767  get_plane_wh(&create_info.extent.width, &create_info.extent.height,
2768  hwfc->sw_format, src->width, src->height, i);
2769 
2770  /* Set the subresource layout based on the layer properties */
2771  for (int j = 0; j < planes; j++) {
2772  ext_img_layouts[j].offset = desc->layers[i].planes[j].offset;
2773  ext_img_layouts[j].rowPitch = desc->layers[i].planes[j].pitch;
2774  ext_img_layouts[j].size = 0; /* The specs say so for all 3 */
2775  ext_img_layouts[j].arrayPitch = 0;
2776  ext_img_layouts[j].depthPitch = 0;
2777  }
2778 
2779  /* Create image */
2780  ret = vk->CreateImage(hwctx->act_dev, &create_info,
2781  hwctx->alloc, &f->img[i]);
2782  if (ret != VK_SUCCESS) {
2783  av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
2784  vk_ret2str(ret));
2785  err = AVERROR(EINVAL);
2786  goto fail;
2787  }
2788 
2789  ret = vk->CreateSemaphore(hwctx->act_dev, &sem_spawn,
2790  hwctx->alloc, &f->sem[i]);
2791  if (ret != VK_SUCCESS) {
2792  av_log(hwctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
2793  vk_ret2str(ret));
2794  return AVERROR_EXTERNAL;
2795  }
2796 
2797  /* We'd import a semaphore onto the one we created using
2798  * vkImportSemaphoreFdKHR but unfortunately neither DRM nor VAAPI
2799  * offer us anything we could import and sync with, so instead
2800  * just signal the semaphore we created. */
2801 
2802  f->layout[i] = create_info.initialLayout;
2803  f->access[i] = 0x0;
2804  f->sem_value[i] = 0;
2805  }
2806 
2807  for (int i = 0; i < desc->nb_objects; i++) {
2808  /* Memory requirements */
2809  VkImageMemoryRequirementsInfo2 req_desc = {
2810  .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
2811  .image = f->img[i],
2812  };
2813  VkMemoryDedicatedRequirements ded_req = {
2814  .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
2815  };
2816  VkMemoryRequirements2 req2 = {
2817  .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
2818  .pNext = &ded_req,
2819  };
2820 
2821  /* Allocation/importing */
2822  VkMemoryFdPropertiesKHR fdmp = {
2823  .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
2824  };
2825  VkImportMemoryFdInfoKHR idesc = {
2826  .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
2827  .fd = dup(desc->objects[i].fd),
2828  .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
2829  };
2830  VkMemoryDedicatedAllocateInfo ded_alloc = {
2831  .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
2832  .pNext = &idesc,
2833  .image = req_desc.image,
2834  };
2835 
2836  /* Get object properties */
2837  ret = vk->GetMemoryFdPropertiesKHR(hwctx->act_dev,
2838  VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
2839  idesc.fd, &fdmp);
2840  if (ret != VK_SUCCESS) {
2841  av_log(hwfc, AV_LOG_ERROR, "Failed to get FD properties: %s\n",
2842  vk_ret2str(ret));
2843  err = AVERROR_EXTERNAL;
2844  close(idesc.fd);
2845  goto fail;
2846  }
2847 
2848  vk->GetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req2);
2849 
2850  /* Only a single bit must be set, not a range, and it must match */
2851  req2.memoryRequirements.memoryTypeBits = fdmp.memoryTypeBits;
2852 
2853  err = alloc_mem(ctx, &req2.memoryRequirements,
2854  VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
2855  (ded_req.prefersDedicatedAllocation ||
2856  ded_req.requiresDedicatedAllocation) ?
2857  &ded_alloc : ded_alloc.pNext,
2858  &f->flags, &f->mem[i]);
2859  if (err) {
2860  close(idesc.fd);
2861  return err;
2862  }
2863 
2864  f->size[i] = req2.memoryRequirements.size;
2865  }
2866 
2867  for (int i = 0; i < desc->nb_layers; i++) {
2868  const int planes = desc->layers[i].nb_planes;
2869  for (int j = 0; j < planes; j++) {
2870  VkImageAspectFlagBits aspect = j == 0 ? VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
2871  j == 1 ? VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT :
2872  VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
2873 
2874  plane_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
2875  plane_info[bind_counts].pNext = NULL;
2876  plane_info[bind_counts].planeAspect = aspect;
2877 
2878  bind_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
2879  bind_info[bind_counts].pNext = planes > 1 ? &plane_info[bind_counts] : NULL;
2880  bind_info[bind_counts].image = f->img[i];
2881  bind_info[bind_counts].memory = f->mem[desc->layers[i].planes[j].object_index];
2882 
2883  /* Offset is already signalled via pPlaneLayouts above */
2884  bind_info[bind_counts].memoryOffset = 0;
2885 
2886  bind_counts++;
2887  }
2888  }
2889 
2890  /* Bind the allocated memory to the images */
2891  ret = vk->BindImageMemory2(hwctx->act_dev, bind_counts, bind_info);
2892  if (ret != VK_SUCCESS) {
2893  av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
2894  vk_ret2str(ret));
2895  err = AVERROR_EXTERNAL;
2896  goto fail;
2897  }
2898 
2899  err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_EXTERNAL_IMPORT);
2900  if (err)
2901  goto fail;
2902 
2903  *frame = f;
2904 
2905  return 0;
2906 
2907 fail:
2908  for (int i = 0; i < desc->nb_layers; i++) {
2909  vk->DestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
2910  vk->DestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
2911  }
2912  for (int i = 0; i < desc->nb_objects; i++)
2913  vk->FreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
2914 
2915  av_free(f);
2916 
2917  return err;
2918 }
2919 
2920 static int vulkan_map_from_drm(AVHWFramesContext *hwfc, AVFrame *dst,
2921  const AVFrame *src, int flags)
2922 {
2923  int err = 0;
2924  AVVkFrame *f;
2925 
2926  if ((err = vulkan_map_from_drm_frame_desc(hwfc, &f, src)))
2927  return err;
2928 
2929  /* The unmapping function will free this */
2930  dst->data[0] = (uint8_t *)f;
2931  dst->width = src->width;
2932  dst->height = src->height;
2933 
2934  err = ff_hwframe_map_create(dst->hw_frames_ctx, dst, src,
2935  &vulkan_unmap_from_drm, f);
2936  if (err < 0)
2937  goto fail;
2938 
2939  av_log(hwfc, AV_LOG_DEBUG, "Mapped DRM object to Vulkan!\n");
2940 
2941  return 0;
2942 
2943 fail:
2944  vulkan_frame_free(hwfc->device_ctx->hwctx, (uint8_t *)f);
2945  dst->data[0] = NULL;
2946  return err;
2947 }
2948 
2949 #if CONFIG_VAAPI
2950 static int vulkan_map_from_vaapi(AVHWFramesContext *dst_fc,
2951  AVFrame *dst, const AVFrame *src,
2952  int flags)
2953 {
2954  int err;
2955  AVFrame *tmp = av_frame_alloc();
2956  AVHWFramesContext *vaapi_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
2957  AVVAAPIDeviceContext *vaapi_ctx = vaapi_fc->device_ctx->hwctx;
2958  VASurfaceID surface_id = (VASurfaceID)(uintptr_t)src->data[3];
2959 
2960  if (!tmp)
2961  return AVERROR(ENOMEM);
2962 
2963  /* We have to sync since like the previous comment said, no semaphores */
2964  vaSyncSurface(vaapi_ctx->display, surface_id);
2965 
2966  tmp->format = AV_PIX_FMT_DRM_PRIME;
2967 
2968  err = av_hwframe_map(tmp, src, flags);
2969  if (err < 0)
2970  goto fail;
2971 
2972  err = vulkan_map_from_drm(dst_fc, dst, tmp, flags);
2973  if (err < 0)
2974  goto fail;
2975 
2976  err = ff_hwframe_map_replace(dst, src);
2977 
2978 fail:
2979  av_frame_free(&tmp);
2980  return err;
2981 }
2982 #endif
2983 #endif
2984 
2985 #if CONFIG_CUDA
/**
 * Export a Vulkan frame's device memory and semaphores into CUDA.
 *
 * Lazily populates dst_f->internal (AVVkFrameInternal) with imported external
 * memory objects, one mapped CUDA array per plane, and imported external
 * semaphores, so subsequent transfers only need CUDA copies plus semaphore
 * wait/signal. Re-entry with an already-populated internal is a no-op.
 *
 * Returns 0 on success, a negative AVERROR on failure; on failure any
 * partially imported state is released via vulkan_free_internal().
 *
 * NOTE(review): the declaration of 'desc' (pixel format descriptor for
 * hwfc->sw_format) appears truncated in this copy of the file — verify
 * against upstream.
 */
static int vulkan_export_to_cuda(AVHWFramesContext *hwfc,
                                 AVBufferRef *cuda_hwfc,
                                 const AVFrame *frame)
{
    int err;
    VkResult ret;
    AVVkFrame *dst_f;
    AVVkFrameInternal *dst_int;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    AVHWFramesContext *cuda_fc = (AVHWFramesContext*)cuda_hwfc->data;
    AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
    AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
    AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
    CudaFunctions *cu = cu_internal->cuda_dl;
    /* 8-bit components map to 8-bit CUDA arrays, anything deeper to 16-bit */
    CUarray_format cufmt = desc->comp[0].depth > 8 ? CU_AD_FORMAT_UNSIGNED_INT16 :
                                                     CU_AD_FORMAT_UNSIGNED_INT8;

    dst_f = (AVVkFrame *)frame->data[0];

    /* Only perform the import once per frame; cached state is reused after */
    dst_int = dst_f->internal;
    if (!dst_int || !dst_int->cuda_fc_ref) {
        if (!dst_f->internal)
            dst_f->internal = dst_int = av_mallocz(sizeof(*dst_f->internal));

        if (!dst_int)
            return AVERROR(ENOMEM);

        /* Keep the CUDA frames context alive for the mapping's lifetime */
        dst_int->cuda_fc_ref = av_buffer_ref(cuda_hwfc);
        if (!dst_int->cuda_fc_ref) {
            av_freep(&dst_f->internal);
            return AVERROR(ENOMEM);
        }

        for (int i = 0; i < planes; i++) {
            CUDA_EXTERNAL_MEMORY_MIPMAPPED_ARRAY_DESC tex_desc = {
                .offset = 0,
                .arrayDesc = {
                    .Depth = 0,
                    .Format = cufmt,
                    /* Plane 1 of a 2-plane format carries two interleaved
                     * chroma channels; everything else is single-channel */
                    .NumChannels = 1 + ((planes == 2) && i),
                    .Flags = 0,
                },
                .numLevels = 1,
            };
            int p_w, p_h;

#ifdef _WIN32
            CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
                .type = IsWindows8OrGreater()
                    ? CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32
                    : CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT,
                .size = dst_f->size[i],
            };
            VkMemoryGetWin32HandleInfoKHR export_info = {
                .sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
                .memory = dst_f->mem[i],
                .handleType = IsWindows8OrGreater()
                    ? VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT
                    : VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
            };
            VkSemaphoreGetWin32HandleInfoKHR sem_export = {
                .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
                .semaphore = dst_f->sem[i],
                .handleType = IsWindows8OrGreater()
                    ? VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT
                    : VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
            };
            CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
                .type = 10 /* TODO: CU_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TIMELINE_SEMAPHORE_WIN32 */,
            };

            ret = vk->GetMemoryWin32HandleKHR(hwctx->act_dev, &export_info,
                                              &ext_desc.handle.win32.handle);
            if (ret != VK_SUCCESS) {
                av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a Win32 Handle: %s!\n",
                       vk_ret2str(ret));
                err = AVERROR_EXTERNAL;
                goto fail;
            }
            /* Remember the handle so it can be closed when the frame is freed */
            dst_int->ext_mem_handle[i] = ext_desc.handle.win32.handle;
#else
            CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
                .type = CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD,
                .size = dst_f->size[i],
            };
            VkMemoryGetFdInfoKHR export_info = {
                .sType  = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
                .memory = dst_f->mem[i],
                .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
            };
            VkSemaphoreGetFdInfoKHR sem_export = {
                .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
                .semaphore = dst_f->sem[i],
                .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
            };
            CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
                .type = 9 /* TODO: CU_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TIMELINE_SEMAPHORE_FD */,
            };

            ret = vk->GetMemoryFdKHR(hwctx->act_dev, &export_info,
                                     &ext_desc.handle.fd);
            if (ret != VK_SUCCESS) {
                av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD: %s!\n",
                       vk_ret2str(ret));
                err = AVERROR_EXTERNAL;
                goto fail;
            }
#endif

            /* On failure the exported FD was not consumed by CUDA, so it must
             * be closed here to avoid leaking it */
            ret = CHECK_CU(cu->cuImportExternalMemory(&dst_int->ext_mem[i], &ext_desc));
            if (ret < 0) {
#ifndef _WIN32
                close(ext_desc.handle.fd);
#endif
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            get_plane_wh(&p_w, &p_h, hwfc->sw_format, hwfc->width, hwfc->height, i);
            tex_desc.arrayDesc.Width  = p_w;
            tex_desc.arrayDesc.Height = p_h;

            ret = CHECK_CU(cu->cuExternalMemoryGetMappedMipmappedArray(&dst_int->cu_mma[i],
                                                                       dst_int->ext_mem[i],
                                                                       &tex_desc));
            if (ret < 0) {
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            /* numLevels is 1, so level 0 is the plane's whole array */
            ret = CHECK_CU(cu->cuMipmappedArrayGetLevel(&dst_int->cu_array[i],
                                                        dst_int->cu_mma[i], 0));
            if (ret < 0) {
                err = AVERROR_EXTERNAL;
                goto fail;
            }

#ifdef _WIN32
            ret = vk->GetSemaphoreWin32HandleKHR(hwctx->act_dev, &sem_export,
                                                 &ext_sem_desc.handle.win32.handle);
#else
            ret = vk->GetSemaphoreFdKHR(hwctx->act_dev, &sem_export,
                                        &ext_sem_desc.handle.fd);
#endif
            if (ret != VK_SUCCESS) {
                av_log(ctx, AV_LOG_ERROR, "Failed to export semaphore: %s\n",
                       vk_ret2str(ret));
                err = AVERROR_EXTERNAL;
                goto fail;
            }
#ifdef _WIN32
            dst_int->ext_sem_handle[i] = ext_sem_desc.handle.win32.handle;
#endif

            ret = CHECK_CU(cu->cuImportExternalSemaphore(&dst_int->cu_sem[i],
                                                         &ext_sem_desc));
            if (ret < 0) {
#ifndef _WIN32
                close(ext_sem_desc.handle.fd);
#endif
                err = AVERROR_EXTERNAL;
                goto fail;
            }
        }
    }

    return 0;

fail:
    vulkan_free_internal(dst_f);
    return err;
}
3164 
/**
 * Upload a CUDA frame (src) into a Vulkan frame (dst).
 *
 * Exports dst to CUDA via vulkan_export_to_cuda(), then performs per-plane
 * cuMemcpy2DAsync copies on the CUDA device stream, bracketed by external
 * semaphore wait/signal so the Vulkan side observes the writes. The frame's
 * CPU-side timeline semaphore values are bumped on success.
 *
 * NOTE(review): the declaration of 'desc' (pixel format descriptor for
 * hwfc->sw_format) appears truncated in this copy — verify against upstream.
 */
static int vulkan_transfer_data_from_cuda(AVHWFramesContext *hwfc,
                                          AVFrame *dst, const AVFrame *src)
{
    int err;
    CUcontext dummy;
    AVVkFrame *dst_f;
    AVVkFrameInternal *dst_int;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    AVHWFramesContext *cuda_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
    AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
    AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
    AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
    CudaFunctions *cu = cu_internal->cuda_dl;
    CUDA_EXTERNAL_SEMAPHORE_WAIT_PARAMS s_w_par[AV_NUM_DATA_POINTERS] = { 0 };
    CUDA_EXTERNAL_SEMAPHORE_SIGNAL_PARAMS s_s_par[AV_NUM_DATA_POINTERS] = { 0 };

    dst_f = (AVVkFrame *)dst->data[0];

    /* Transition the image for external (CUDA-side) access */
    err = prepare_frame(hwfc, &fp->upload_ctx, dst_f, PREP_MODE_EXTERNAL_EXPORT);
    if (err < 0)
        return err;

    err = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
    if (err < 0)
        return err;

    err = vulkan_export_to_cuda(hwfc, src->hw_frames_ctx, dst);
    if (err < 0) {
        CHECK_CU(cu->cuCtxPopCurrent(&dummy));
        return err;
    }

    dst_int = dst_f->internal;

    /* Wait on the current timeline value, signal the next one */
    for (int i = 0; i < planes; i++) {
        s_w_par[i].params.fence.value = dst_f->sem_value[i] + 0;
        s_s_par[i].params.fence.value = dst_f->sem_value[i] + 1;
    }

    err = CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
                                                     planes, cuda_dev->stream));
    if (err < 0)
        goto fail;

    /* One 2D copy per plane, device memory -> mapped CUDA array */
    for (int i = 0; i < planes; i++) {
        CUDA_MEMCPY2D cpy = {
            .srcMemoryType = CU_MEMORYTYPE_DEVICE,
            .srcDevice     = (CUdeviceptr)src->data[i],
            .srcPitch      = src->linesize[i],
            .srcY          = 0,

            .dstMemoryType = CU_MEMORYTYPE_ARRAY,
            .dstArray      = dst_int->cu_array[i],
        };

        int p_w, p_h;
        get_plane_wh(&p_w, &p_h, hwfc->sw_format, hwfc->width, hwfc->height, i);

        cpy.WidthInBytes = p_w * desc->comp[i].step;
        cpy.Height = p_h;

        err = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
        if (err < 0)
            goto fail;
    }

    err = CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
                                                       planes, cuda_dev->stream));
    if (err < 0)
        goto fail;

    /* Keep the CPU-side view of the timeline semaphores in sync */
    for (int i = 0; i < planes; i++)
        dst_f->sem_value[i]++;

    CHECK_CU(cu->cuCtxPopCurrent(&dummy));

    av_log(hwfc, AV_LOG_VERBOSE, "Transfered CUDA image to Vulkan!\n");

    /* Transition back so Vulkan can consume the imported contents */
    return err = prepare_frame(hwfc, &fp->upload_ctx, dst_f, PREP_MODE_EXTERNAL_IMPORT);

fail:
    CHECK_CU(cu->cuCtxPopCurrent(&dummy));
    vulkan_free_internal(dst_f);
    dst_f->internal = NULL;
    av_buffer_unref(&dst->buf[0]);
    return err;
}
3255 #endif
3256 
/**
 * hwcontext map_to entry point: maps a frame of another API into Vulkan.
 * Dispatches on the source pixel format; unsupported formats or missing
 * device extensions yield AVERROR(ENOSYS).
 *
 * NOTE(review): the extension-check 'if (...)' lines preceding each return
 * appear truncated in this copy of the file — verify against upstream.
 */
static int vulkan_map_to(AVHWFramesContext *hwfc, AVFrame *dst,
                         const AVFrame *src, int flags)
{

    switch (src->format) {
#if CONFIG_LIBDRM
#if CONFIG_VAAPI
    case AV_PIX_FMT_VAAPI:
        return vulkan_map_from_vaapi(hwfc, dst, src, flags);
    else
        return AVERROR(ENOSYS);
#endif
    case AV_PIX_FMT_DRM_PRIME:
        return vulkan_map_from_drm(hwfc, dst, src, flags);
    else
        return AVERROR(ENOSYS);
#endif
    default:
        return AVERROR(ENOSYS);
    }
}
3281 
3282 #if CONFIG_LIBDRM
/* Private data of a Vulkan -> DRM mapping */
typedef struct VulkanDRMMapping {
    AVDRMFrameDescriptor drm_desc; /* exported descriptor; its FDs are closed on unmap */
    AVVkFrame *source;             /* the Vulkan frame the mapping was created from */
} VulkanDRMMapping;
3287 
3288 static void vulkan_unmap_to_drm(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
3289 {
3290  AVDRMFrameDescriptor *drm_desc = hwmap->priv;
3291 
3292  for (int i = 0; i < drm_desc->nb_objects; i++)
3293  close(drm_desc->objects[i].fd);
3294 
3295  av_free(drm_desc);
3296 }
3297 
3298 static inline uint32_t vulkan_fmt_to_drm(VkFormat vkfmt)
3299 {
3300  for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
3301  if (vulkan_drm_format_map[i].vk_format == vkfmt)
3302  return vulkan_drm_format_map[i].drm_fourcc;
3303  return DRM_FORMAT_INVALID;
3304 }
3305 
/**
 * Export (map) an AVVkFrame as a DRM PRIME frame descriptor.
 *
 * Waits for all pending work on the frame's timeline semaphores, exports
 * each plane's memory as a dma-buf FD, queries the DRM format modifier and
 * fills in per-layer plane layout information.
 *
 * NOTE(review): the declaration of 'p' (VulkanDevicePriv) appears truncated
 * in this copy of the file — verify against upstream.
 * NOTE(review): after ff_hwframe_map_create() succeeds, failure paths still
 * 'goto end' and free drm_desc, while the attached unmap callback will also
 * free it when the mapping is torn down — looks like a potential double
 * free; confirm against upstream fixes.
 */
static int vulkan_map_to_drm(AVHWFramesContext *hwfc, AVFrame *dst,
                             const AVFrame *src, int flags)
{
    int err = 0;
    VkResult ret;
    AVVkFrame *f = (AVVkFrame *)src->data[0];
    FFVulkanFunctions *vk = &p->vkfn;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    AVVulkanFramesContext *hwfctx = hwfc->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VkImageDrmFormatModifierPropertiesEXT drm_mod = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
    };
    VkSemaphoreWaitInfo wait_info = {
        .sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
        .flags          = 0x0,
        .semaphoreCount = planes,
    };

    AVDRMFrameDescriptor *drm_desc = av_mallocz(sizeof(*drm_desc));
    if (!drm_desc)
        return AVERROR(ENOMEM);

    /* Transition the image for external consumption */
    err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_EXTERNAL_EXPORT);
    if (err < 0)
        goto end;

    /* Wait for the operation to finish so we can cleanly export it. */
    wait_info.pSemaphores = f->sem;
    wait_info.pValues     = f->sem_value;

    vk->WaitSemaphores(hwctx->act_dev, &wait_info, UINT64_MAX);

    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, &vulkan_unmap_to_drm, drm_desc);
    if (err < 0)
        goto end;

    ret = vk->GetImageDrmFormatModifierPropertiesEXT(hwctx->act_dev, f->img[0],
                                                     &drm_mod);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to retrieve DRM format modifier!\n");
        err = AVERROR_EXTERNAL;
        goto end;
    }

    /* One dma-buf object per backing memory allocation */
    for (int i = 0; (i < planes) && (f->mem[i]); i++) {
        VkMemoryGetFdInfoKHR export_info = {
            .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
            .memory     = f->mem[i],
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
        };

        ret = vk->GetMemoryFdKHR(hwctx->act_dev, &export_info,
                                 &drm_desc->objects[i].fd);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD!\n");
            err = AVERROR_EXTERNAL;
            goto end;
        }

        drm_desc->nb_objects++;
        drm_desc->objects[i].size = f->size[i];
        drm_desc->objects[i].format_modifier = drm_mod.drmFormatModifier;
    }

    /* One layer per image plane */
    drm_desc->nb_layers = planes;
    for (int i = 0; i < drm_desc->nb_layers; i++) {
        VkSubresourceLayout layout;
        VkImageSubresource sub = {
            .aspectMask = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
        };
        VkFormat plane_vkfmt = av_vkfmt_from_pixfmt(hwfc->sw_format)[i];

        drm_desc->layers[i].format    = vulkan_fmt_to_drm(plane_vkfmt);
        drm_desc->layers[i].nb_planes = 1;

        if (drm_desc->layers[i].format == DRM_FORMAT_INVALID) {
            av_log(hwfc, AV_LOG_ERROR, "Cannot map to DRM layer, unsupported!\n");
            err = AVERROR_PATCHWELCOME;
            goto end;
        }

        drm_desc->layers[i].planes[0].object_index = FFMIN(i, drm_desc->nb_objects - 1);

        /* Layout of optimally-tiled images is opaque; leave offset/pitch 0 */
        if (f->tiling == VK_IMAGE_TILING_OPTIMAL)
            continue;

        vk->GetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
        drm_desc->layers[i].planes[0].offset = layout.offset;
        drm_desc->layers[i].planes[0].pitch  = layout.rowPitch;

        /* Contiguous frames share one allocation; add the per-plane offset */
        if (hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY)
            drm_desc->layers[i].planes[0].offset += f->offset[i];
    }

    dst->width   = src->width;
    dst->height  = src->height;
    dst->data[0] = (uint8_t *)drm_desc;

    av_log(hwfc, AV_LOG_VERBOSE, "Mapped AVVkFrame to a DRM object!\n");

    return 0;

end:
    av_free(drm_desc);
    return err;
}
3415 
3416 #if CONFIG_VAAPI
3417 static int vulkan_map_to_vaapi(AVHWFramesContext *hwfc, AVFrame *dst,
3418  const AVFrame *src, int flags)
3419 {
3420  int err;
3421  AVFrame *tmp = av_frame_alloc();
3422  if (!tmp)
3423  return AVERROR(ENOMEM);
3424 
3425  tmp->format = AV_PIX_FMT_DRM_PRIME;
3426 
3427  err = vulkan_map_to_drm(hwfc, tmp, src, flags);
3428  if (err < 0)
3429  goto fail;
3430 
3431  err = av_hwframe_map(dst, tmp, flags);
3432  if (err < 0)
3433  goto fail;
3434 
3435  err = ff_hwframe_map_replace(dst, src);
3436 
3437 fail:
3438  av_frame_free(&tmp);
3439  return err;
3440 }
3441 #endif
3442 #endif
3443 
                           const AVFrame *src, int flags)
{
    /* hwcontext map_from entry point: maps a Vulkan frame to another API's
     * format, or to system memory by default.
     * NOTE(review): the function's first signature line and the
     * extension-check 'if (...)' lines appear truncated in this copy of the
     * file — verify against upstream. */

    switch (dst->format) {
#if CONFIG_LIBDRM
    case AV_PIX_FMT_DRM_PRIME:
        return vulkan_map_to_drm(hwfc, dst, src, flags);
    else
        return AVERROR(ENOSYS);
#if CONFIG_VAAPI
    case AV_PIX_FMT_VAAPI:
        return vulkan_map_to_vaapi(hwfc, dst, src, flags);
    else
        return AVERROR(ENOSYS);
#endif
#endif
    default:
        /* Fallback: map the frame's memory directly to the CPU */
        return vulkan_map_frame_to_mem(hwfc, dst, src, flags);
    }
}
3468 
/* A staging VkBuffer plus its backing allocation, refcounted through an
 * AVBufferRef and released by free_buf(). Used for image upload/download.
 * NOTE(review): the 'mapped_mem' field referenced elsewhere appears
 * truncated in this copy of the file — verify against upstream. */
typedef struct ImageBuffer {
    VkBuffer buf;                   /* buffer handle */
    VkDeviceMemory mem;             /* backing device memory */
    VkMemoryPropertyFlagBits flags; /* memory property flags of the allocation */
} ImageBuffer;
3475 
3476 static void free_buf(void *opaque, uint8_t *data)
3477 {
3478  AVHWDeviceContext *ctx = opaque;
3479  AVVulkanDeviceContext *hwctx = ctx->hwctx;
3480  VulkanDevicePriv *p = ctx->internal->priv;
3481  FFVulkanFunctions *vk = &p->vkfn;
3482  ImageBuffer *vkbuf = (ImageBuffer *)data;
3483 
3484  if (vkbuf->buf)
3485  vk->DestroyBuffer(hwctx->act_dev, vkbuf->buf, hwctx->alloc);
3486  if (vkbuf->mem)
3487  vk->FreeMemory(hwctx->act_dev, vkbuf->mem, hwctx->alloc);
3488 
3489  av_free(data);
3490 }
3491 
{
    /* Computes the staging-buffer size for one plane: rounds the caller's
     * stride up to the device's optimal copy row pitch (written back through
     * the pointer), then rounds height*stride up to the minimum memory map
     * alignment.
     * NOTE(review): the function's signature line appears truncated in this
     * copy of the file — verify against upstream. */
    size_t size;
    *stride = FFALIGN(*stride, p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
    size = height*(*stride);
    size = FFALIGN(size, p->props.properties.limits.minMemoryMapAlignment);
    return size;
}
3500 
                      VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags,
                      size_t size, uint32_t req_memory_bits, int host_mapped,
                      void *create_pnext, void *alloc_pnext)
{
    /* Creates a refcounted staging ImageBuffer: VkBuffer creation, memory
     * allocation (optionally dedicated) and binding. host_mapped marks
     * buffers backed by imported host memory (they are skipped by
     * map/unmap). Returns 0 or a negative AVERROR; on failure everything is
     * released through free_buf().
     * NOTE(review): the function's first signature line appears truncated in
     * this copy of the file — verify against upstream. */
    int err;
    VkResult ret;
    int use_ded_mem;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    VkBufferCreateInfo buf_spawn = {
        .sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext       = create_pnext,
        .usage       = usage,
        .size        = size,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
    };

    VkBufferMemoryRequirementsInfo2 req_desc = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
    };
    VkMemoryDedicatedAllocateInfo ded_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
        .pNext = alloc_pnext,
    };
    VkMemoryDedicatedRequirements ded_req = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
    };
    VkMemoryRequirements2 req = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
        .pNext = &ded_req,
    };

    ImageBuffer *vkbuf = av_mallocz(sizeof(*vkbuf));
    if (!vkbuf)
        return AVERROR(ENOMEM);

    vkbuf->mapped_mem = host_mapped;

    ret = vk->CreateBuffer(hwctx->act_dev, &buf_spawn, NULL, &vkbuf->buf);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create buffer: %s\n",
               vk_ret2str(ret));
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    req_desc.buffer = vkbuf->buf;

    vk->GetBufferMemoryRequirements2(hwctx->act_dev, &req_desc, &req);

    /* In case the implementation prefers/requires dedicated allocation */
    use_ded_mem = ded_req.prefersDedicatedAllocation |
                  ded_req.requiresDedicatedAllocation;
    if (use_ded_mem)
        ded_alloc.buffer = vkbuf->buf;

    /* Additional requirements imposed on us */
    if (req_memory_bits)
        req.memoryRequirements.memoryTypeBits &= req_memory_bits;

    err = alloc_mem(ctx, &req.memoryRequirements, flags,
                    use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
                    &vkbuf->flags, &vkbuf->mem);
    if (err)
        goto fail;

    ret = vk->BindBufferMemory(hwctx->act_dev, vkbuf->buf, vkbuf->mem, 0);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory to buffer: %s\n",
               vk_ret2str(ret));
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    /* Hand ownership of vkbuf to the AVBufferRef; free_buf() releases it */
    *buf = av_buffer_create((uint8_t *)vkbuf, sizeof(*vkbuf), free_buf, ctx, 0);
    if (!(*buf)) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    return 0;

fail:
    free_buf(ctx, (uint8_t *)vkbuf);
    return err;
}
3590 
3591 /* Skips mapping of host mapped buffers but still invalidates them */
3592 static int map_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, uint8_t *mem[],
3593  int nb_buffers, int invalidate)
3594 {
3595  VkResult ret;
3596  AVVulkanDeviceContext *hwctx = ctx->hwctx;
3597  VulkanDevicePriv *p = ctx->internal->priv;
3598  FFVulkanFunctions *vk = &p->vkfn;
3599  VkMappedMemoryRange invalidate_ctx[AV_NUM_DATA_POINTERS];
3600  int invalidate_count = 0;
3601 
3602  for (int i = 0; i < nb_buffers; i++) {
3603  ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
3604  if (vkbuf->mapped_mem)
3605  continue;
3606 
3607  ret = vk->MapMemory(hwctx->act_dev, vkbuf->mem, 0,
3608  VK_WHOLE_SIZE, 0, (void **)&mem[i]);
3609  if (ret != VK_SUCCESS) {
3610  av_log(ctx, AV_LOG_ERROR, "Failed to map buffer memory: %s\n",
3611  vk_ret2str(ret));
3612  return AVERROR_EXTERNAL;
3613  }
3614  }
3615 
3616  if (!invalidate)
3617  return 0;
3618 
3619  for (int i = 0; i < nb_buffers; i++) {
3620  ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
3621  const VkMappedMemoryRange ival_buf = {
3622  .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
3623  .memory = vkbuf->mem,
3624  .size = VK_WHOLE_SIZE,
3625  };
3626 
3627  /* For host imported memory Vulkan says to use platform-defined
3628  * sync methods, but doesn't really say not to call flush or invalidate
3629  * on original host pointers. It does explicitly allow to do that on
3630  * host-mapped pointers which are then mapped again using vkMapMemory,
3631  * but known implementations return the original pointers when mapped
3632  * again. */
3633  if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
3634  continue;
3635 
3636  invalidate_ctx[invalidate_count++] = ival_buf;
3637  }
3638 
3639  if (invalidate_count) {
3640  ret = vk->InvalidateMappedMemoryRanges(hwctx->act_dev, invalidate_count,
3641  invalidate_ctx);
3642  if (ret != VK_SUCCESS)
3643  av_log(ctx, AV_LOG_WARNING, "Failed to invalidate memory: %s\n",
3644  vk_ret2str(ret));
3645  }
3646 
3647  return 0;
3648 }
3649 
                         int nb_buffers, int flush)
{
    /* Counterpart of map_buffers(): optionally flushes non-coherent ranges,
     * then unmaps every buffer that was actually mapped (host-imported
     * buffers are skipped). Returns 0, or AVERROR_EXTERNAL if the flush
     * failed (unmapping is still attempted).
     * NOTE(review): the function's first signature line appears truncated in
     * this copy of the file — verify against upstream. */
    int err = 0;
    VkResult ret;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    VkMappedMemoryRange flush_ctx[AV_NUM_DATA_POINTERS];
    int flush_count = 0;

    if (flush) {
        for (int i = 0; i < nb_buffers; i++) {
            ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
            const VkMappedMemoryRange flush_buf = {
                .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                .memory = vkbuf->mem,
                .size   = VK_WHOLE_SIZE,
            };

            /* Host-coherent memory needs no explicit flush */
            if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
                continue;

            flush_ctx[flush_count++] = flush_buf;
        }
    }

    if (flush_count) {
        ret = vk->FlushMappedMemoryRanges(hwctx->act_dev, flush_count, flush_ctx);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to flush memory: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL; /* We still want to try to unmap them */
        }
    }

    for (int i = 0; i < nb_buffers; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        if (vkbuf->mapped_mem)
            continue;

        vk->UnmapMemory(hwctx->act_dev, vkbuf->mem);
    }

    return err;
}
3696 
                              AVBufferRef **bufs, size_t *buf_offsets,
                              const int *buf_stride, int w,
                              int h, enum AVPixelFormat pix_fmt, int to_buf)
{
    /* Records and submits a command buffer copying between a frame's images
     * and staging buffers (direction chosen by to_buf), with image layout
     * transitions and timeline-semaphore wait/signal around the submission.
     * NOTE(review): the first signature line and the declarations of 'p'
     * (VulkanDevicePriv) and 'desc' (pixel format descriptor) appear
     * truncated in this copy of the file — verify against upstream. */
    int err;
    AVVkFrame *frame = (AVVkFrame *)f->data[0];
    VulkanFramesPriv *fp = hwfc->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    int bar_num = 0;
    VkPipelineStageFlagBits sem_wait_dst[AV_NUM_DATA_POINTERS];

    const int planes = av_pix_fmt_count_planes(pix_fmt);

    VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
    /* Downloads and uploads use separate execution contexts */
    VulkanExecCtx *ectx = to_buf ? &fp->download_ctx : &fp->upload_ctx;
    VkCommandBuffer cmd_buf = get_buf_exec_ctx(hwfc, ectx);

    uint64_t sem_signal_values[AV_NUM_DATA_POINTERS];

    VkTimelineSemaphoreSubmitInfo s_timeline_sem_info = {
        .sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
        .pWaitSemaphoreValues = frame->sem_value,
        .pSignalSemaphoreValues = sem_signal_values,
        .waitSemaphoreValueCount = planes,
        .signalSemaphoreValueCount = planes,
    };

    VkSubmitInfo s_info = {
        .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext                = &s_timeline_sem_info,
        .pSignalSemaphores    = frame->sem,
        .pWaitSemaphores      = frame->sem,
        .pWaitDstStageMask    = sem_wait_dst,
        .signalSemaphoreCount = planes,
        .waitSemaphoreCount   = planes,
    };

    /* Each plane's timeline semaphore is advanced by one on completion */
    for (int i = 0; i < planes; i++)
        sem_signal_values[i] = frame->sem_value[i] + 1;

    if ((err = wait_start_exec_ctx(hwfc, ectx)))
        return err;

    /* Change the image layout to something more optimal for transfers */
    for (int i = 0; i < planes; i++) {
        VkImageLayout new_layout = to_buf ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL :
                                            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        VkAccessFlags new_access = to_buf ? VK_ACCESS_TRANSFER_READ_BIT :
                                            VK_ACCESS_TRANSFER_WRITE_BIT;

        sem_wait_dst[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

        /* If the layout matches and we have read access skip the barrier */
        if ((frame->layout[i] == new_layout) && (frame->access[i] & new_access))
            continue;

        img_bar[bar_num].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        img_bar[bar_num].srcAccessMask = 0x0;
        img_bar[bar_num].dstAccessMask = new_access;
        img_bar[bar_num].oldLayout = frame->layout[i];
        img_bar[bar_num].newLayout = new_layout;
        img_bar[bar_num].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        img_bar[bar_num].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        img_bar[bar_num].image = frame->img[i];
        img_bar[bar_num].subresourceRange.levelCount = 1;
        img_bar[bar_num].subresourceRange.layerCount = 1;
        img_bar[bar_num].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

        /* Track the new layout/access on the frame itself */
        frame->layout[i] = img_bar[bar_num].newLayout;
        frame->access[i] = img_bar[bar_num].dstAccessMask;

        bar_num++;
    }

    if (bar_num)
        vk->CmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                               VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
                               0, NULL, 0, NULL, bar_num, img_bar);

    /* Schedule a copy for each plane */
    for (int i = 0; i < planes; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        VkBufferImageCopy buf_reg = {
            .bufferOffset = buf_offsets[i],
            .bufferRowLength = buf_stride[i] / desc->comp[i].step,
            .imageSubresource.layerCount = 1,
            .imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .imageOffset = { 0, 0, 0, },
        };

        int p_w, p_h;
        get_plane_wh(&p_w, &p_h, pix_fmt, w, h, i);

        buf_reg.bufferImageHeight = p_h;
        buf_reg.imageExtent = (VkExtent3D){ p_w, p_h, 1, };

        if (to_buf)
            vk->CmdCopyImageToBuffer(cmd_buf, frame->img[i], frame->layout[i],
                                     vkbuf->buf, 1, &buf_reg);
        else
            vk->CmdCopyBufferToImage(cmd_buf, vkbuf->buf, frame->img[i],
                                     frame->layout[i], 1, &buf_reg);
    }

    /* When uploading, do this asynchronously if the source is refcounted by
     * keeping the buffers as a submission dependency.
     * The hwcontext is guaranteed to not be freed until all frames are freed
     * in the frames_unint function.
     * When downloading to buffer, do this synchronously and wait for the
     * queue submission to finish executing */
    if (!to_buf) {
        int ref;
        for (ref = 0; ref < AV_NUM_DATA_POINTERS; ref++) {
            if (!f->buf[ref])
                break;
            if ((err = add_buf_dep_exec_ctx(hwfc, ectx, &f->buf[ref], 1)))
                return err;
        }
        if (ref && (err = add_buf_dep_exec_ctx(hwfc, ectx, bufs, planes)))
            return err;
        return submit_exec_ctx(hwfc, ectx, &s_info, frame, !ref);
    } else {
        return submit_exec_ctx(hwfc, ectx, &s_info, frame, 1);
    }
}
3826 
3827 static int vulkan_transfer_data(AVHWFramesContext *hwfc, const AVFrame *vkf,
3828  const AVFrame *swf, int from)
3829 {
3830  int err = 0;
3831  VkResult ret;
3832  AVVkFrame *f = (AVVkFrame *)vkf->data[0];
3833  AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
3834  AVVulkanDeviceContext *hwctx = dev_ctx->hwctx;
3836  FFVulkanFunctions *vk = &p->vkfn;
3837 
3838  AVFrame tmp;
3839  AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
3840  size_t buf_offsets[AV_NUM_DATA_POINTERS] = { 0 };
3841 
3842  int p_w, p_h;
3843  const int planes = av_pix_fmt_count_planes(swf->format);
3844 
3845  int host_mapped[AV_NUM_DATA_POINTERS] = { 0 };
3846  const int map_host = !!(p->extensions & FF_VK_EXT_EXTERNAL_HOST_MEMORY);
3847 
3848  if ((swf->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(swf->format))) {
3849  av_log(hwfc, AV_LOG_ERROR, "Unsupported software frame pixel format!\n");
3850  return AVERROR(EINVAL);
3851  }
3852 
3853  if (swf->width > hwfc->width || swf->height > hwfc->height)
3854  return AVERROR(EINVAL);
3855 
3856  /* For linear, host visiable images */
3857  if (f->tiling == VK_IMAGE_TILING_LINEAR &&
3858  f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
3859  AVFrame *map = av_frame_alloc();
3860  if (!map)
3861  return AVERROR(ENOMEM);
3862  map->format = swf->format;
3863 
3865  if (err)
3866  return err;
3867 
3868  err = av_frame_copy((AVFrame *)(from ? swf : map), from ? map : swf);
3869  av_frame_free(&map);
3870  return err;
3871  }
3872 
3873  /* Create buffers */
3874  for (int i = 0; i < planes; i++) {
3875  size_t req_size;
3876 
3877  VkExternalMemoryBufferCreateInfo create_desc = {
3878  .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
3879  .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
3880  };
3881 
3882  VkImportMemoryHostPointerInfoEXT import_desc = {
3883  .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
3884  .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
3885  };
3886 
3887  VkMemoryHostPointerPropertiesEXT p_props = {
3888  .sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
3889  };
3890 
3891  get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
3892 
3893  tmp.linesize[i] = FFABS(swf->linesize[i]);
3894 
3895  /* Do not map images with a negative stride */
3896  if (map_host && swf->linesize[i] > 0) {
3897  size_t offs;
3898  offs = (uintptr_t)swf->data[i] % p->hprops.minImportedHostPointerAlignment;
3899  import_desc.pHostPointer = swf->data[i] - offs;
3900 
3901  /* We have to compensate for the few extra bytes of padding we
3902  * completely ignore at the start */
3903  req_size = FFALIGN(offs + tmp.linesize[i] * p_h,
3904  p->hprops.minImportedHostPointerAlignment);
3905 
3906  ret = vk->GetMemoryHostPointerPropertiesEXT(hwctx->act_dev,
3907  import_desc.handleType,
3908  import_desc.pHostPointer,
3909  &p_props);
3910 
3911  if (ret == VK_SUCCESS) {
3912  host_mapped[i] = 1;
3913  buf_offsets[i] = offs;
3914  }
3915  }
3916 
3917  if (!host_mapped[i])
3918  req_size = get_req_buffer_size(p, &tmp.linesize[i], p_h);
3919 
3920  err = create_buf(dev_ctx, &bufs[i],
3921  from ? VK_BUFFER_USAGE_TRANSFER_DST_BIT :
3922  VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
3923  VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
3924  req_size, p_props.memoryTypeBits, host_mapped[i],
3925  host_mapped[i] ? &create_desc : NULL,
3926  host_mapped[i] ? &import_desc : NULL);
3927  if (err)
3928  goto end;
3929  }
3930 
3931  if (!from) {
3932  /* Map, copy image TO buffer (which then goes to the VkImage), unmap */
3933  if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
3934  goto end;
3935 
3936  for (int i = 0; i < planes; i++) {
3937  if (host_mapped[i])
3938  continue;
3939 
3940  get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
3941 
3942  av_image_copy_plane(tmp.data[i], tmp.linesize[i],
3943  (const uint8_t *)swf->data[i], swf->linesize[i],
3944  FFMIN(tmp.linesize[i], FFABS(swf->linesize[i])),
3945  p_h);
3946  }
3947 
3948  if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
3949  goto end;
3950  }
3951 
3952  /* Copy buffers into/from image */
3953  err = transfer_image_buf(hwfc, vkf, bufs, buf_offsets, tmp.linesize,
3954  swf->width, swf->height, swf->format, from);
3955 
3956  if (from) {
3957  /* Map, copy buffer (which came FROM the VkImage) to the frame, unmap */
3958  if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
3959  goto end;
3960 
3961  for (int i = 0; i < planes; i++) {
3962  if (host_mapped[i])
3963  continue;
3964 
3965  get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
3966 
3967  av_image_copy_plane_uc_from(swf->data[i], swf->linesize[i],
3968  (const uint8_t *)tmp.data[i], tmp.linesize[i],
3969  FFMIN(tmp.linesize[i], FFABS(swf->linesize[i])),
3970  p_h);
3971  }
3972 
3973  if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
3974  goto end;
3975  }
3976 
3977 end:
3978  for (int i = 0; i < planes; i++)
3979  av_buffer_unref(&bufs[i]);
3980 
3981  return err;
3982 }
3983 
/* Download path dispatcher: copies a Vulkan hardware frame (src) into the
 * destination frame (dst), choosing the backend by dst->format.
 * NOTE(review): the opening signature line
 * "static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst,"
 * (doc line 3984) was dropped by the extraction. */
3985  const AVFrame *src)
3986 {
/* NOTE(review): the declaration of `p` (doc line 3987, presumably
 * "VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;") was dropped
 * by the extraction — confirm against the original file. */
3988 
3989  switch (src->format) {
3990 #if CONFIG_CUDA
3991  case AV_PIX_FMT_CUDA:
 /* CUDA interop needs both external-memory and external-semaphore
  * sharing; the required extensions differ per platform. */
3992 #ifdef _WIN32
3993  if ((p->extensions & FF_VK_EXT_EXTERNAL_WIN32_MEMORY) &&
3994  (p->extensions & FF_VK_EXT_EXTERNAL_WIN32_SEM))
3995 #else
3996  if ((p->extensions & FF_VK_EXT_EXTERNAL_FD_MEMORY) &&
3997  (p->extensions & FF_VK_EXT_EXTERNAL_FD_SEM))
3998 #endif
3999  return vulkan_transfer_data_from_cuda(hwfc, dst, src);
4000 #endif
4001  default:
 /* Other hardware formats are not supported; software frames go
  * through the generic buffer-based copy (from = 0 → image→frame). */
4002  if (src->hw_frames_ctx)
4003  return AVERROR(ENOSYS);
4004  else
4005  return vulkan_transfer_data(hwfc, dst, src, 0);
4006  }
4007 }
4008 
4009 #if CONFIG_CUDA
/* Copy a Vulkan frame (src, an AVVkFrame behind src->data[0]) into a CUDA
 * frame (dst). Exports the Vulkan image and its timeline semaphores to CUDA,
 * then performs per-plane async 2D copies on the CUDA stream, bracketed by
 * external semaphore wait/signal so the two APIs stay ordered.
 * Returns 0 on success, a negative AVERROR/CUDA error otherwise. */
4010 static int vulkan_transfer_data_to_cuda(AVHWFramesContext *hwfc, AVFrame *dst,
4011  const AVFrame *src)
4012 {
4013  int err;
4014  CUcontext dummy;
4015  AVVkFrame *dst_f;
4016  AVVkFrameInternal *dst_int;
4017  VulkanFramesPriv *fp = hwfc->internal->priv;
4018  const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
/* NOTE(review): the declarations of `desc` (doc line 4019, presumably
 * av_pix_fmt_desc_get(hwfc->sw_format) — used below via desc->comp[i].step)
 * and `cuda_fc` (doc line 4021, presumably dst->hw_frames_ctx->data) were
 * dropped by the extraction — confirm against the original file. */
4020 
4022  AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
4023  AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
4024  AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
4025  CudaFunctions *cu = cu_internal->cuda_dl;
4026  CUDA_EXTERNAL_SEMAPHORE_WAIT_PARAMS s_w_par[AV_NUM_DATA_POINTERS] = { 0 };
4027  CUDA_EXTERNAL_SEMAPHORE_SIGNAL_PARAMS s_s_par[AV_NUM_DATA_POINTERS] = { 0 };
4028 
 /* The Vulkan "hardware frame" payload is an AVVkFrame pointer. */
4029  dst_f = (AVVkFrame *)src->data[0];
4030 
 /* Transition the image for external (CUDA) access before exporting. */
4031  err = prepare_frame(hwfc, &fp->upload_ctx, dst_f, PREP_MODE_EXTERNAL_EXPORT);
4032  if (err < 0)
4033  return err;
4034 
4035  err = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
4036  if (err < 0)
4037  return err;
4038 
 /* Lazily creates/imports the CUDA arrays and external semaphores that
  * mirror the Vulkan image memory and timeline semaphores. */
4039  err = vulkan_export_to_cuda(hwfc, dst->hw_frames_ctx, src);
4040  if (err < 0) {
4041  CHECK_CU(cu->cuCtxPopCurrent(&dummy));
4042  return err;
4043  }
4044 
4045  dst_int = dst_f->internal;
4046 
 /* Timeline semaphore values: wait for the current value (Vulkan work
  * done), signal value+1 when the CUDA copies complete. */
4047  for (int i = 0; i < planes; i++) {
4048  s_w_par[i].params.fence.value = dst_f->sem_value[i] + 0;
4049  s_s_par[i].params.fence.value = dst_f->sem_value[i] + 1;
4050  }
4051 
4052  err = CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
4053  planes, cuda_dev->stream));
4054  if (err < 0)
4055  goto fail;
4056 
 /* Per-plane copy: Vulkan image (imported as a CUDA array) → device
  * memory of the destination CUDA frame. */
4057  for (int i = 0; i < planes; i++) {
4058  CUDA_MEMCPY2D cpy = {
4059  .dstMemoryType = CU_MEMORYTYPE_DEVICE,
4060  .dstDevice = (CUdeviceptr)dst->data[i],
4061  .dstPitch = dst->linesize[i],
4062  .dstY = 0,
4063 
4064  .srcMemoryType = CU_MEMORYTYPE_ARRAY,
4065  .srcArray = dst_int->cu_array[i],
4066  };
4067 
4068  int w, h;
4069  get_plane_wh(&w, &h, hwfc->sw_format, hwfc->width, hwfc->height, i);
4070 
 /* Width in bytes = pixels × bytes-per-step for this component. */
4071  cpy.WidthInBytes = w * desc->comp[i].step;
4072  cpy.Height = h;
4073 
4074  err = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
4075  if (err < 0)
4076  goto fail;
4077  }
4078 
4079  err = CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
4080  planes, cuda_dev->stream));
4081  if (err < 0)
4082  goto fail;
4083 
 /* Keep the CPU-side mirror of the timeline values in sync with the
  * +1 signalled above. */
4084  for (int i = 0; i < planes; i++)
4085  dst_f->sem_value[i]++;
4086 
4087  CHECK_CU(cu->cuCtxPopCurrent(&dummy));
4088 
 /* NOTE(review): "Transfered" is a typo in this user-visible log message
  * ("Transferred") — a code change, not a comment fix, so left as-is. */
4089  av_log(hwfc, AV_LOG_VERBOSE, "Transfered Vulkan image to CUDA!\n");
4090 
 /* Hand ownership back to Vulkan for subsequent Vulkan-side use. */
4091  return prepare_frame(hwfc, &fp->upload_ctx, dst_f, PREP_MODE_EXTERNAL_IMPORT);
4092 
 /* Error path: drop the CUDA context, tear down the exported CUDA
  * mirrors, and release the destination frame's buffer reference. */
4093 fail:
4094  CHECK_CU(cu->cuCtxPopCurrent(&dummy));
4095  vulkan_free_internal(dst_f);
4096  dst_f->internal = NULL;
4097  av_buffer_unref(&dst->buf[0]);
4098  return err;
4099 }
4100 #endif
4101 
/* Upload path dispatcher: copies the source frame (src) into a Vulkan
 * hardware frame (dst), choosing the backend by dst->format.
 * NOTE(review): the opening signature line
 * "static int vulkan_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst,"
 * (doc line 4102) was dropped by the extraction. */
4103  const AVFrame *src)
4104 {
/* NOTE(review): the declaration of `p` (doc line 4105, presumably
 * "VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;") was dropped
 * by the extraction — confirm against the original file. */
4106 
4107  switch (dst->format) {
4108 #if CONFIG_CUDA
4109  case AV_PIX_FMT_CUDA:
 /* CUDA interop needs both external-memory and external-semaphore
  * sharing; the required extensions differ per platform. */
4110 #ifdef _WIN32
4111  if ((p->extensions & FF_VK_EXT_EXTERNAL_WIN32_MEMORY) &&
4112  (p->extensions & FF_VK_EXT_EXTERNAL_WIN32_SEM))
4113 #else
4114  if ((p->extensions & FF_VK_EXT_EXTERNAL_FD_MEMORY) &&
4115  (p->extensions & FF_VK_EXT_EXTERNAL_FD_SEM))
4116 #endif
4117  return vulkan_transfer_data_to_cuda(hwfc, dst, src);
4118 #endif
4119  default:
 /* Other hardware formats are not supported; software frames go
  * through the generic buffer-based copy (from = 1 → frame→image). */
4120  if (dst->hw_frames_ctx)
4121  return AVERROR(ENOSYS);
4122  else
4123  return vulkan_transfer_data(hwfc, src, dst, 1);
4124  }
4125 }
4126 
/* Derive a Vulkan frames context from another device's frames context by
 * simply initializing dst_fc as a regular Vulkan frames context; src_fc and
 * flags are unused here.
 * NOTE(review): the opening signature line
 * "static int vulkan_frames_derive_to(AVHWFramesContext *dst_fc,"
 * (doc line 4127) was dropped by the extraction. */
4128  AVHWFramesContext *src_fc, int flags)
4129 {
4130  return vulkan_frames_init(dst_fc);
4131 }
4132 
/* Allocate a zero-initialized AVVkFrame for the caller to populate; caller
 * owns the returned memory (av_free). Returns NULL on allocation failure.
 * NOTE(review): the opening signature line "AVVkFrame *av_vk_frame_alloc(void)"
 * (doc line 4133) was dropped by the extraction. */
4134 {
4135  return av_mallocz(sizeof(AVVkFrame));
4136 }
4137 
/* Registration table hooking the Vulkan backend into the generic hwcontext
 * layer: sizes of the public/private contexts plus the callback set.
 * NOTE(review): the opening lines
 * "const HWContextType ff_hwcontext_type_vulkan = {" and
 * ".type = AV_HWDEVICE_TYPE_VULKAN," (doc lines ~4138-4139) were dropped by
 * the extraction. */
4140  .name = "Vulkan",
4141 
 /* Allocation sizes for the public hwctx and internal priv structs. */
4142  .device_hwctx_size = sizeof(AVVulkanDeviceContext),
4143  .device_priv_size = sizeof(VulkanDevicePriv),
4144  .frames_hwctx_size = sizeof(AVVulkanFramesContext),
4145  .frames_priv_size = sizeof(VulkanFramesPriv),
4146 
 /* Device lifecycle: init, create-from-scratch, derive-from-other. */
4147  .device_init = &vulkan_device_init,
4148  .device_create = &vulkan_device_create,
4149  .device_derive = &vulkan_device_derive,
4150 
 /* Frame pool management. */
4151  .frames_get_constraints = &vulkan_frames_get_constraints,
4152  .frames_init = vulkan_frames_init,
4153  .frames_get_buffer = vulkan_get_buffer,
4154  .frames_uninit = vulkan_frames_uninit,
4155 
 /* Upload/download between software frames and Vulkan images. */
4156  .transfer_get_formats = vulkan_transfer_get_formats,
4157  .transfer_data_to = vulkan_transfer_data_to,
4158  .transfer_data_from = vulkan_transfer_data_from,
4159 
 /* Zero-copy mapping to/from other hardware formats (DRM, VAAPI, ...). */
4160  .map_to = vulkan_map_to,
4161  .map_from = vulkan_map_from,
4162  .frames_derive_to = &vulkan_frames_derive_to,
4163 
4164  .pix_fmts = (const enum AVPixelFormat []) {
 /* NOTE(review): the list entries (doc lines 4165-4166, presumably
  * AV_PIX_FMT_VULKAN terminated by AV_PIX_FMT_NONE) were dropped by
  * the extraction — confirm against the original file. */
4167  },
4168 };
vulkan_loader.h
FF_VK_EXT_NO_FLAG
@ FF_VK_EXT_NO_FLAG
Definition: vulkan_functions.h:41
VulkanDevicePriv::extensions
FFVulkanExtensions extensions
Definition: hwcontext_vulkan.c:101
formats
formats
Definition: signature.h:48
AV_PIX_FMT_YUVA422P16
#define AV_PIX_FMT_YUVA422P16
Definition: pixfmt.h:502
load_libvulkan
static int load_libvulkan(AVHWDeviceContext *ctx)
Definition: hwcontext_vulkan.c:302
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
vulkan_device_init
static int vulkan_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_vulkan.c:1429
AV_PIX_FMT_GBRAP16
#define AV_PIX_FMT_GBRAP16
Definition: pixfmt.h:481
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVVulkanDeviceContext::phys_dev
VkPhysicalDevice phys_dev
Physical device.
Definition: hwcontext_vulkan.h:63
check_validation_layers
static int check_validation_layers(AVHWDeviceContext *ctx, AVDictionary *opts, const char *const **dst, uint32_t *num, int *debug_mode)
Definition: hwcontext_vulkan.c:556
AV_PIX_FMT_CUDA
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
Definition: pixfmt.h:253
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
VulkanDevicePriv::libvulkan
void * libvulkan
Definition: hwcontext_vulkan.c:81
planes
static const struct @346 planes[]
VulkanOptExtension::name
const char * name
Definition: hwcontext_vulkan.c:337
VulkanDevicePriv::qfs
uint32_t qfs[5]
Definition: hwcontext_vulkan.c:94
AVCUDADeviceContextInternal
Definition: hwcontext_cuda_internal.h:31
VulkanDevicePriv::num_qfs
int num_qfs
Definition: hwcontext_vulkan.c:95
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
VulkanQueueCtx::nb_buf_deps
int nb_buf_deps
Definition: hwcontext_vulkan.c:67
VulkanMapping::flags
int flags
Definition: hwcontext_vulkan.c:2448
SETUP_QUEUE
#define SETUP_QUEUE(qf_idx)
hwcontext_cuda_internal.h
out
FILE * out
Definition: movenc.c:54
vk_link_struct
static void vk_link_struct(void *chain, void *in)
Definition: hwcontext_vulkan.c:263
AV_PIX_FMT_BGR32
#define AV_PIX_FMT_BGR32
Definition: pixfmt.h:434
vulkan_transfer_data_to
static int vulkan_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_vulkan.c:3984
sub
static float sub(float src0, float src1)
Definition: dnn_backend_native_layer_mathbinary.c:31
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2888
CHECK_QUEUE
#define CHECK_QUEUE(type, required, fidx, ctx_qf, qc)
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
vulkan_frames_get_constraints
static int vulkan_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_vulkan.c:1617
vulkan_transfer_data
static int vulkan_transfer_data(AVHWFramesContext *hwfc, const AVFrame *vkf, const AVFrame *swf, int from)
Definition: hwcontext_vulkan.c:3827
av_unused
#define av_unused
Definition: attributes.h:131
vulkan_frames_derive_to
static int vulkan_frames_derive_to(AVHWFramesContext *dst_fc, AVHWFramesContext *src_fc, int flags)
Definition: hwcontext_vulkan.c:4127
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:99
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:330
tmp
static uint8_t tmp[11]
Definition: aes_ctr.c:28
pixdesc.h
AVVulkanDeviceContext::get_proc_addr
PFN_vkGetInstanceProcAddr get_proc_addr
Pointer to the instance-provided vkGetInstanceProcAddr loading function.
Definition: hwcontext_vulkan.h:53
optional_device_exts
static const VulkanOptExtension optional_device_exts[]
Definition: hwcontext_vulkan.c:345
VulkanFramesPriv::download_ctx
VulkanExecCtx download_ctx
Definition: hwcontext_vulkan.c:122
AVFrame::width
int width
Definition: frame.h:402
AV_PIX_FMT_YUVA420P16
#define AV_PIX_FMT_YUVA420P16
Definition: pixfmt.h:501
w
uint8_t w
Definition: llviddspenc.c:38
VulkanQueueCtx::fence
VkFence fence
Definition: hwcontext_vulkan.c:61
AVDRMFrameDescriptor::nb_layers
int nb_layers
Number of layers in the frame.
Definition: hwcontext_drm.h:145
AV_PIX_FMT_DRM_PRIME
@ AV_PIX_FMT_DRM_PRIME
DRM-managed buffers exposed through PRIME buffer sharing.
Definition: pixfmt.h:348
AV_PIX_FMT_YUVA420P10
#define AV_PIX_FMT_YUVA420P10
Definition: pixfmt.h:496
AVVulkanFramesContext::create_pnext
void * create_pnext
Extension data for image creation.
Definition: hwcontext_vulkan.h:181
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:794
data
const char data[16]
Definition: mxf.c:146
linear
static int linear(InterplayACMContext *s, unsigned ind, unsigned col)
Definition: interplayacm.c:132
VulkanDevicePriv::vkfn
FFVulkanFunctions vkfn
Definition: hwcontext_vulkan.c:82
AVVulkanDeviceContext::queue_family_decode_index
int queue_family_decode_index
Queue family index for video decode ops, and the amount of queues enabled.
Definition: hwcontext_vulkan.h:136
try_export_flags
static void try_export_flags(AVHWFramesContext *hwfc, VkExternalMemoryHandleTypeFlags *comp_handle_types, VkExternalMemoryHandleTypeFlagBits *iexp, VkExternalMemoryHandleTypeFlagBits exp)
Definition: hwcontext_vulkan.c:2127
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:459
AVVulkanDeviceContext::inst
VkInstance inst
Vulkan instance.
Definition: hwcontext_vulkan.h:58
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
free_exec_ctx
static void free_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
Definition: hwcontext_vulkan.c:1118
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
vulkan_map_from
static int vulkan_map_from(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_vulkan.c:3444
AVHWDeviceContext::internal
AVHWDeviceInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:71
AV_PIX_FMT_BGR24
@ AV_PIX_FMT_BGR24
packed RGB 8:8:8, 24bpp, BGRBGR...
Definition: pixfmt.h:69
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
AVHWFramesContext::internal
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:134
AVDictionary
Definition: dict.c:32
ff_hwframe_map_create
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:742
VulkanExecCtx::cur_queue_idx
int cur_queue_idx
Definition: hwcontext_vulkan.c:76
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
HWMapDescriptor::priv
void * priv
Hardware-specific private data associated with the mapping.
Definition: hwcontext_internal.h:151
av_popcount
#define av_popcount
Definition: common.h:149
AVDRMFrameDescriptor
DRM frame descriptor.
Definition: hwcontext_drm.h:133
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
VulkanExecCtx::queues
VulkanQueueCtx * queues
Definition: hwcontext_vulkan.c:74
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:539
AV_PIX_FMT_YUVA422P10
#define AV_PIX_FMT_YUVA422P10
Definition: pixfmt.h:497
AVVulkanDeviceContext::nb_decode_queues
int nb_decode_queues
Definition: hwcontext_vulkan.h:137
prepare_frame
static int prepare_frame(AVHWFramesContext *hwfc, VulkanExecCtx *ectx, AVVkFrame *frame, enum PrepMode pmode)
Definition: hwcontext_vulkan.c:1909
AV_PIX_FMT_VULKAN
@ AV_PIX_FMT_VULKAN
Vulkan hardware images.
Definition: pixfmt.h:376
VulkanDeviceSelection::uuid
uint8_t uuid[VK_UUID_SIZE]
Definition: hwcontext_vulkan.c:754
AVVulkanDeviceContext::queue_family_index
int queue_family_index
Queue family index for graphics operations, and the number of queues enabled for it.
Definition: hwcontext_vulkan.h:106
AVHWFramesInternal::pool_internal
AVBufferPool * pool_internal
Definition: hwcontext_internal.h:118
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:351
av_image_copy_plane
void av_image_copy_plane(uint8_t *dst, int dst_linesize, const uint8_t *src, int src_linesize, int bytewidth, int height)
Copy image plane from src to dst.
Definition: imgutils.c:374
VulkanExecCtx::bufs
VkCommandBuffer * bufs
Definition: hwcontext_vulkan.c:73
alloc_mem
static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req, VkMemoryPropertyFlagBits req_flags, const void *alloc_extension, VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
Definition: hwcontext_vulkan.c:1663
AV_HWDEVICE_TYPE_VULKAN
@ AV_HWDEVICE_TYPE_VULKAN
Definition: hwcontext.h:39
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
AV_PIX_FMT_NB
@ AV_PIX_FMT_NB
number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of...
Definition: pixfmt.h:423
AV_HWDEVICE_TYPE_CUDA
@ AV_HWDEVICE_TYPE_CUDA
Definition: hwcontext.h:30
AVDRMDeviceContext::fd
int fd
File descriptor of DRM device.
Definition: hwcontext_drm.h:166
VulkanExecCtx::pool
VkCommandPool pool
Definition: hwcontext_vulkan.c:72
av_pix_fmt_count_planes
int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2928
AV_HWFRAME_MAP_READ
@ AV_HWFRAME_MAP_READ
The mapping must be readable.
Definition: hwcontext.h:524
VulkanDevicePriv::hprops
VkPhysicalDeviceExternalMemoryHostPropertiesEXT hprops
Definition: hwcontext_vulkan.c:87
vulkan_device_derive
static int vulkan_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *src_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_vulkan.c:1546
VulkanOptExtension::flag
FFVulkanExtensions flag
Definition: hwcontext_vulkan.c:338
VulkanExecCtx::nb_queues
int nb_queues
Definition: hwcontext_vulkan.c:75
AVVulkanDeviceContext::alloc
const VkAllocationCallbacks * alloc
Custom memory allocator, else NULL.
Definition: hwcontext_vulkan.h:46
AVHWFramesInternal::priv
void * priv
Definition: hwcontext_internal.h:116
AV_PIX_FMT_GBRAP
@ AV_PIX_FMT_GBRAP
planar GBRA 4:4:4:4 32bpp
Definition: pixfmt.h:205
AVVulkanDeviceContext::queue_family_comp_index
int queue_family_comp_index
Queue family index for compute operations and the number of queues enabled.
Definition: hwcontext_vulkan.h:120
fail
#define fail()
Definition: checkasm.h:134
AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY
@ AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY
Definition: hwcontext_vulkan.h:151
AVDRMLayerDescriptor::nb_planes
int nb_planes
Number of planes in the layer.
Definition: hwcontext_drm.h:106
AV_PIX_FMT_YUVA444P16
#define AV_PIX_FMT_YUVA444P16
Definition: pixfmt.h:503
AVVulkanFramesContext::flags
AVVkFrameFlags flags
A combination of AVVkFrameFlags.
Definition: hwcontext_vulkan.h:197
VulkanDevicePriv
Definition: hwcontext_vulkan.c:79
AVDRMLayerDescriptor::planes
AVDRMPlaneDescriptor planes[AV_DRM_MAX_PLANES]
Array of planes in this layer.
Definition: hwcontext_drm.h:110
FF_VK_DEFAULT_USAGE_FLAGS
#define FF_VK_DEFAULT_USAGE_FLAGS
Definition: vulkan.h:29
dummy
int dummy
Definition: motion.c:65
AVVkFrame::mem
VkDeviceMemory mem[AV_NUM_DATA_POINTERS]
Memory backing the images.
Definition: hwcontext_vulkan.h:229
ImageBuffer::flags
VkMemoryPropertyFlagBits flags
Definition: hwcontext_vulkan.c:3472
ImageBuffer::buf
VkBuffer buf
Definition: hwcontext_vulkan.c:3470
AVVulkanFramesContext
Allocated as AVHWFramesContext.hwctx, used to set pool-specific options.
Definition: hwcontext_vulkan.h:157
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
AVHWFramesConstraints::min_width
int min_width
The minimum size of frames in this hw_frames_ctx.
Definition: hwcontext.h:471
vulkan_map_frame_to_mem
static int vulkan_map_frame_to_mem(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_vulkan.c:2485
AVCUDADeviceContextInternal::cuda_device
CUdevice cuda_device
Definition: hwcontext_cuda_internal.h:34
type
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf type
Definition: writing_filters.txt:86
cqueue_create
static cqueue * cqueue_create(int size, int max_size)
Definition: af_dynaudnorm.c:179
vulkan_frame_free
static void vulkan_frame_free(void *opaque, uint8_t *data)
Definition: hwcontext_vulkan.c:1759
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:443
VulkanDevicePriv::device_features_1_1
VkPhysicalDeviceVulkan11Features device_features_1_1
Definition: hwcontext_vulkan.c:90
VulkanDevicePriv::props
VkPhysicalDeviceProperties2 props
Definition: hwcontext_vulkan.c:85
AVDRMPlaneDescriptor::offset
ptrdiff_t offset
Offset within that object of this plane.
Definition: hwcontext_drm.h:83
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:87
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:462
wait_start_exec_ctx
static int wait_start_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
Definition: hwcontext_vulkan.c:1169
submit_exec_ctx
static int submit_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd, VkSubmitInfo *s_info, AVVkFrame *f, int synchronous)
Definition: hwcontext_vulkan.c:1242
avassert.h
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:471
AV_VK_FRAME_FLAG_NONE
@ AV_VK_FRAME_FLAG_NONE
Definition: hwcontext_vulkan.h:146
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
AVVulkanDeviceContext::nb_graphics_queues
int nb_graphics_queues
Definition: hwcontext_vulkan.h:107
vulkan_map_to
static int vulkan_map_to(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_vulkan.c:3257
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:60
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:384
AVHWFramesContext::pool
AVBufferPool * pool
A pool from which the frames are allocated by av_hwframe_get_buffer().
Definition: hwcontext.h:190
av_fast_realloc
void * av_fast_realloc(void *ptr, unsigned int *size, size_t min_size)
Reallocate the given buffer if it is not large enough, otherwise do nothing.
Definition: mem.c:495
VulkanDeviceSelection::pci_device
uint32_t pci_device
Definition: hwcontext_vulkan.c:757
AV_PIX_FMT_YUVA420P
@ AV_PIX_FMT_YUVA420P
planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
Definition: pixfmt.h:101
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:472
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:50
format
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate. The lists are not just lists
ff_vk_load_functions
static int ff_vk_load_functions(AVHWDeviceContext *ctx, FFVulkanFunctions *vk, uint64_t extensions_mask, int has_inst, int has_dev)
Function loader.
Definition: vulkan_loader.h:80
AV_PIX_FMT_0BGR32
#define AV_PIX_FMT_0BGR32
Definition: pixfmt.h:437
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
av_strtok
char * av_strtok(char *s, const char *delim, char **saveptr)
Split the string into several tokens which can be accessed by successive calls to av_strtok().
Definition: avstring.c:179
VulkanQueueCtx
Definition: hwcontext_vulkan.c:60
from
const char * from
Definition: jacosubdec.c:66
FF_VK_EXT_EXTERNAL_HOST_MEMORY
@ FF_VK_EXT_EXTERNAL_HOST_MEMORY
Definition: vulkan_functions.h:34
AV_PIX_FMT_YUVA444P12
#define AV_PIX_FMT_YUVA444P12
Definition: pixfmt.h:500
pixfmt_is_supported
static int pixfmt_is_supported(AVHWDeviceContext *dev_ctx, enum AVPixelFormat p, int linear)
Definition: hwcontext_vulkan.c:275
vulkan_frames_uninit
static void vulkan_frames_uninit(AVHWFramesContext *hwfc)
Definition: hwcontext_vulkan.c:2252
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:470
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AVDRMObjectDescriptor::fd
int fd
DRM PRIME fd for the object.
Definition: hwcontext_drm.h:52
check_extensions
static int check_extensions(AVHWDeviceContext *ctx, int dev, AVDictionary *opts, const char *const **dst, uint32_t *num, int debug)
Definition: hwcontext_vulkan.c:427
VulkanDeviceSelection::index
int index
Definition: hwcontext_vulkan.c:759
vulkan_transfer_get_formats
static int vulkan_transfer_get_formats(AVHWFramesContext *hwfc, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_vulkan.c:2431
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
AV_PIX_FMT_GRAYF32
#define AV_PIX_FMT_GRAYF32
Definition: pixfmt.h:491
LIBAVUTIL_VERSION_MINOR
#define LIBAVUTIL_VERSION_MINOR
Definition: version.h:82
ImageBuffer::mem
VkDeviceMemory mem
Definition: hwcontext_vulkan.c:3471
get_req_buffer_size
static size_t get_req_buffer_size(VulkanDevicePriv *p, int *stride, int height)
Definition: hwcontext_vulkan.c:3492
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
FFABS
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
Definition: common.h:64
VulkanFramesPriv::conv_ctx
VulkanExecCtx conv_ctx
Definition: hwcontext_vulkan.c:118
if
if(ret)
Definition: filter_design.txt:179
av_vkfmt_from_pixfmt
const VkFormat * av_vkfmt_from_pixfmt(enum AVPixelFormat p)
Returns the format of each image up to the number of planes for a given sw_format.
Definition: hwcontext_vulkan.c:242
AVVulkanDeviceContext
Main Vulkan context, allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_vulkan.h:42
opts
AVDictionary * opts
Definition: movenc.c:50
PREP_MODE_WRITE
@ PREP_MODE_WRITE
Definition: hwcontext_vulkan.c:1904
AV_PIX_FMT_RGBA64
#define AV_PIX_FMT_RGBA64
Definition: pixfmt.h:449
pick_queue_family
static int pick_queue_family(VkQueueFamilyProperties *qf, uint32_t num_qf, VkQueueFlagBits flags)
Definition: hwcontext_vulkan.c:903
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
AVERROR_PATCHWELCOME
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:64
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVVulkanDeviceContext::nb_enabled_dev_extensions
int nb_enabled_dev_extensions
Definition: hwcontext_vulkan.h:97
AVHWDeviceInternal::priv
void * priv
Definition: hwcontext_internal.h:105
vk_ret2str
static const char * vk_ret2str(VkResult res)
Definition: hwcontext_vulkan.c:364
AVVkFrameInternal
Definition: hwcontext_vulkan.c:128
VulkanDevicePriv::debug_ctx
VkDebugUtilsMessengerEXT debug_ctx
Definition: hwcontext_vulkan.c:98
AVVulkanFramesContext::alloc_pnext
void * alloc_pnext[AV_NUM_DATA_POINTERS]
Extension data for memory allocation.
Definition: hwcontext_vulkan.h:190
setup_queue_families
static int setup_queue_families(AVHWDeviceContext *ctx, VkDeviceCreateInfo *cd)
Definition: hwcontext_vulkan.c:926
LIBAVUTIL_VERSION_MAJOR
#define LIBAVUTIL_VERSION_MAJOR
Definition: version.h:81
VulkanMapping
Definition: hwcontext_vulkan.c:2446
unmap_buffers
static int unmap_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, int nb_buffers, int flush)
Definition: hwcontext_vulkan.c:3650
ff_hwcontext_type_vulkan
const HWContextType ff_hwcontext_type_vulkan
Definition: hwcontext_vulkan.c:4138
hwcontext_vulkan.h
AVVulkanDeviceContext::enabled_inst_extensions
const char *const * enabled_inst_extensions
Enabled instance extensions.
Definition: hwcontext_vulkan.h:85
VulkanDevicePriv::device_features_1_2
VkPhysicalDeviceVulkan12Features device_features_1_2
Definition: hwcontext_vulkan.c:91
AVVulkanFramesContext::usage
VkImageUsageFlagBits usage
Defines extra usage of output frames.
Definition: hwcontext_vulkan.h:170
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:258
CASE
#define CASE(VAL)
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:460
AVVulkanDeviceContext::queue_family_tx_index
int queue_family_tx_index
Queue family index for transfer operations and the number of queues enabled.
Definition: hwcontext_vulkan.h:113
alloc_bind_mem
static int alloc_bind_mem(AVHWFramesContext *hwfc, AVVkFrame *f, void *alloc_pnext, size_t alloc_pnext_stride)
Definition: hwcontext_vulkan.c:1783
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
fp
#define fp
Definition: regdef.h:44
exp
int8_t exp
Definition: eval.c:72
create_buf
static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf, VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags, size_t size, uint32_t req_memory_bits, int host_mapped, void *create_pnext, void *alloc_pnext)
Definition: hwcontext_vulkan.c:3501
VulkanFramesPriv
Definition: hwcontext_vulkan.c:116
index
int index
Definition: gxfenc.c:89
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
VulkanDeviceSelection
Definition: hwcontext_vulkan.c:753
AV_DRM_MAX_PLANES
@ AV_DRM_MAX_PLANES
The maximum number of layers/planes in a DRM frame.
Definition: hwcontext_drm.h:39
FF_VK_EXT_DRM_MODIFIER_FLAGS
@ FF_VK_EXT_DRM_MODIFIER_FLAGS
Definition: vulkan_functions.h:31
source
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a source
Definition: filter_design.txt:255
AVDRMFrameDescriptor::layers
AVDRMLayerDescriptor layers[AV_DRM_MAX_PLANES]
Array of layers in the frame.
Definition: hwcontext_drm.h:149
usage
const char * usage
Definition: floatimg_cmp.c:60
create_frame
static int create_frame(AVHWFramesContext *hwfc, AVVkFrame **frame, VkImageTiling tiling, VkImageUsageFlagBits usage, void *create_pnext)
Definition: hwcontext_vulkan.c:2021
AVVkFrame::size
size_t size[AV_NUM_DATA_POINTERS]
Definition: hwcontext_vulkan.h:230
vulkan_pool_alloc
static AVBufferRef * vulkan_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_vulkan.c:2189
PrepMode
PrepMode
Definition: hwcontext_vulkan.c:1903
f
f
Definition: af_crystalizer.c:122
AVCUDADeviceContext::internal
AVCUDADeviceContextInternal * internal
Definition: hwcontext_cuda.h:45
AV_PIX_FMT_RGB24
@ AV_PIX_FMT_RGB24
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:68
AV_PIX_FMT_P012
#define AV_PIX_FMT_P012
Definition: pixfmt.h:509
AV_PIX_FMT_FLAG_RGB
#define AV_PIX_FMT_FLAG_RGB
The pixel format contains RGB-like data (as opposed to YUV/grayscale).
Definition: pixdesc.h:136
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:762
AVVkFrame
Definition: hwcontext_vulkan.h:213
av_vk_frame_alloc
AVVkFrame * av_vk_frame_alloc(void)
Allocates a single AVVkFrame and initializes everything as 0.
Definition: hwcontext_vulkan.c:4133
VulkanFramesPriv::upload_ctx
VulkanExecCtx upload_ctx
Definition: hwcontext_vulkan.c:121
vulkan_device_free
static void vulkan_device_free(AVHWDeviceContext *ctx)
Definition: hwcontext_vulkan.c:1287
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
VulkanQueueCtx::buf_deps_alloc_size
int buf_deps_alloc_size
Definition: hwcontext_vulkan.c:68
AV_PIX_FMT_GBRPF32
#define AV_PIX_FMT_GBRPF32
Definition: pixfmt.h:488
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:464
vulkan_unmap_frame
static void vulkan_unmap_frame(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
Definition: hwcontext_vulkan.c:2451
transfer_image_buf
static int transfer_image_buf(AVHWFramesContext *hwfc, const AVFrame *f, AVBufferRef **bufs, size_t *buf_offsets, const int *buf_stride, int w, int h, enum AVPixelFormat pix_fmt, int to_buf)
Definition: hwcontext_vulkan.c:3697
AV_PIX_FMT_RGB48
#define AV_PIX_FMT_RGB48
Definition: pixfmt.h:445
size
int size
Definition: twinvq_data.h:10344
vulkan_transfer_data_from
static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_vulkan.c:4102
AV_NUM_DATA_POINTERS
#define AV_NUM_DATA_POINTERS
Definition: frame.h:331
VulkanDevicePriv::use_linear_images
int use_linear_images
Definition: hwcontext_vulkan.c:104
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:466
FF_VK_EXT_EXTERNAL_DMABUF_MEMORY
@ FF_VK_EXT_EXTERNAL_DMABUF_MEMORY
Definition: vulkan_functions.h:30
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:417
AV_PIX_FMT_NV16
@ AV_PIX_FMT_NV16
interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:191
VulkanDeviceSelection::vendor_id
uint32_t vendor_id
Definition: hwcontext_vulkan.c:758
AVDRMObjectDescriptor::size
size_t size
Total size of the object.
Definition: hwcontext_drm.h:58
height
#define height
ImageBuffer
Definition: hwcontext_vulkan.c:3469
AV_PIX_FMT_YUVA444P
@ AV_PIX_FMT_YUVA444P
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
Definition: pixfmt.h:167
AV_PIX_FMT_YUVA444P10
#define AV_PIX_FMT_YUVA444P10
Definition: pixfmt.h:498
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
offset
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf offset
Definition: writing_filters.txt:86
AVHWFramesConstraints::max_width
int max_width
The maximum size of frames in this hw_frames_ctx.
Definition: hwcontext.h:478
VulkanOptExtension
Definition: hwcontext_vulkan.c:336
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:256
CHECK_CU
#define CHECK_CU(x)
Definition: cuviddec.c:116
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
create_exec_ctx
static int create_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd, int queue_family_index, int num_queues)
Definition: hwcontext_vulkan.c:1060
VulkanDevicePriv::dev_is_nvidia
int dev_is_nvidia
Definition: hwcontext_vulkan.c:110
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:191
vulkan_free_internal
static void vulkan_free_internal(AVVkFrame *f)
Definition: hwcontext_vulkan.c:1721
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
add_buf_dep_exec_ctx
static int add_buf_dep_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd, AVBufferRef *const *deps, int nb_deps)
Definition: hwcontext_vulkan.c:1212
COPY_FEATURE
#define COPY_FEATURE(DST, NAME)
vulkan.h
layout
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel layout
Definition: filter_design.txt:18
AVDRMFrameDescriptor::objects
AVDRMObjectDescriptor objects[AV_DRM_MAX_PLANES]
Array of objects making up the frame.
Definition: hwcontext_drm.h:141
AVCUDADeviceContextInternal::cuda_dl
CudaFunctions * cuda_dl
Definition: hwcontext_cuda_internal.h:32
VulkanQueueCtx::buf_deps
AVBufferRef ** buf_deps
Definition: hwcontext_vulkan.c:66
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
FF_VK_EXT_EXTERNAL_FD_MEMORY
@ FF_VK_EXT_EXTERNAL_FD_MEMORY
Definition: vulkan_functions.h:32
AVDRMObjectDescriptor::format_modifier
uint64_t format_modifier
Format modifier applied to the object (DRM_FORMAT_MOD_*).
Definition: hwcontext_drm.h:65
VkFormat
enum VkFormat VkFormat
Definition: hwcontext_stub.c:25
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
weights
static const int weights[]
Definition: hevc_pel.c:32
AV_PIX_FMT_NV24
@ AV_PIX_FMT_NV24
planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:368
plane_info
Definition: vf_edgedetect.c:52
vulkan_frames_init
static int vulkan_frames_init(AVHWFramesContext *hwfc)
Definition: hwcontext_vulkan.c:2267
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AVVulkanDeviceContext::nb_encode_queues
int nb_encode_queues
Definition: hwcontext_vulkan.h:129
get_buf_exec_ctx
static VkCommandBuffer get_buf_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
Definition: hwcontext_vulkan.c:1155
AV_PIX_FMT_X2RGB10
#define AV_PIX_FMT_X2RGB10
Definition: pixfmt.h:516
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:254
free_buf
static void free_buf(void *opaque, uint8_t *data)
Definition: hwcontext_vulkan.c:3476
VulkanDeviceSelection::has_uuid
int has_uuid
Definition: hwcontext_vulkan.c:755
hwcontext_drm.h
VulkanMapping::frame
AVVkFrame * frame
Definition: hwcontext_vulkan.c:2447
AVDRMPlaneDescriptor::object_index
int object_index
Index of the object containing this plane in the objects array of the enclosing frame descriptor.
Definition: hwcontext_drm.h:79
AV_PIX_FMT_NV21
@ AV_PIX_FMT_NV21
as above, but U and V bytes are swapped
Definition: pixfmt.h:90
AV_PIX_FMT_BGR565
#define AV_PIX_FMT_BGR565
Definition: pixfmt.h:451
vkfmts
const VkFormat vkfmts[4]
Definition: hwcontext_vulkan.c:167
ff_hwframe_map_replace
int ff_hwframe_map_replace(AVFrame *dst, const AVFrame *src)
Replace the current hwmap of dst with the one from src, used for indirect mappings like VAAPI->(DRM)-...
Definition: hwcontext.c:950
vk_dbg_callback
static VkBool32 vk_dbg_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT *data, void *priv)
Definition: hwcontext_vulkan.c:404
AV_PIX_FMT_NV42
@ AV_PIX_FMT_NV42
as above, but U and V bytes are swapped
Definition: pixfmt.h:369
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:262
mod
static int mod(int a, int b)
Modulo operation with only positive remainders.
Definition: vf_v360.c:750
AV_PIX_FMT_P016
#define AV_PIX_FMT_P016
Definition: pixfmt.h:510
AV_PIX_FMT_RGB565
#define AV_PIX_FMT_RGB565
Definition: pixfmt.h:446
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:415
create_instance
static int create_instance(AVHWDeviceContext *ctx, AVDictionary *opts)
Definition: hwcontext_vulkan.c:666
stride
#define stride
Definition: h264pred_template.c:537
AVVkFrame::sem
VkSemaphore sem[AV_NUM_DATA_POINTERS]
Synchronization timeline semaphores, one for each sw_format plane.
Definition: hwcontext_vulkan.h:249
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
AVCUDADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_cuda.h:42
hwcontext_vaapi.h
AVDRMLayerDescriptor::format
uint32_t format
Format of the layer (DRM_FORMAT_*).
Definition: hwcontext_drm.h:100
vk_find_struct
static const void * vk_find_struct(const void *chain, VkStructureType stype)
Definition: hwcontext_vulkan.c:250
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
cuda_check.h
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
vulkan_get_buffer
static int vulkan_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
Definition: hwcontext_vulkan.c:2417
unref_exec_ctx_deps
static void unref_exec_ctx_deps(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
Definition: hwcontext_vulkan.c:1160
AVHWFramesConstraints::max_height
int max_height
Definition: hwcontext.h:479
AVVkFrame::internal
struct AVVkFrameInternal * internal
Internal data.
Definition: hwcontext_vulkan.h:261
get_plane_wh
static void get_plane_wh(int *w, int *h, enum AVPixelFormat format, int frame_w, int frame_h, int plane)
Definition: hwcontext_vulkan.c:2004
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:463
SIZE_SPECIFIER
#define SIZE_SPECIFIER
Definition: internal.h:150
map_buffers
static int map_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, uint8_t *mem[], int nb_buffers, int invalidate)
Definition: hwcontext_vulkan.c:3592
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:678
vulkan_device_create
static int vulkan_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_vulkan.c:1530
AVFrame::height
int height
Definition: frame.h:402
av_image_copy_plane_uc_from
void av_image_copy_plane_uc_from(uint8_t *dst, ptrdiff_t dst_linesize, const uint8_t *src, ptrdiff_t src_linesize, ptrdiff_t bytewidth, int height)
Copy image data located in uncacheable (e.g.
Definition: imgutils.c:359
AVHWFramesConstraints::min_height
int min_height
Definition: hwcontext.h:472
VulkanQueueCtx::was_synchronous
int was_synchronous
Definition: hwcontext_vulkan.c:63
RELEASE_PROPS
#define RELEASE_PROPS(props, count)
Definition: hwcontext_vulkan.c:158
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_PIX_FMT_YUVA422P12
#define AV_PIX_FMT_YUVA422P12
Definition: pixfmt.h:499
AVVulkanDeviceContext::nb_comp_queues
int nb_comp_queues
Definition: hwcontext_vulkan.h:121
AV_PIX_FMT_GBRAPF32
#define AV_PIX_FMT_GBRAPF32
Definition: pixfmt.h:489
LIBAVUTIL_VERSION_MICRO
#define LIBAVUTIL_VERSION_MICRO
Definition: version.h:83
find_device
static int find_device(AVHWDeviceContext *ctx, VulkanDeviceSelection *select)
Definition: hwcontext_vulkan.c:774
FF_VK_EXT_DEBUG_UTILS
@ FF_VK_EXT_DEBUG_UTILS
Definition: vulkan_functions.h:35
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:112
VulkanDevicePriv::dev_is_intel
int dev_is_intel
Definition: hwcontext_vulkan.c:113
optional_instance_exts
static const VulkanOptExtension optional_instance_exts[]
Definition: hwcontext_vulkan.c:341
AV_PIX_FMT_FLAG_PLANAR
#define AV_PIX_FMT_FLAG_PLANAR
At least one pixel component is not in the first data plane.
Definition: pixdesc.h:132
PREP_MODE_EXTERNAL_IMPORT
@ PREP_MODE_EXTERNAL_IMPORT
Definition: hwcontext_vulkan.c:1906
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
AVVulkanFramesContext::tiling
VkImageTiling tiling
Controls the tiling of allocated frames.
Definition: hwcontext_vulkan.h:164
VulkanExecCtx
Definition: hwcontext_vulkan.c:71
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:508
vk_pixfmt_map
static const struct @334 vk_pixfmt_map[]
AVVkFrame::sem_value
uint64_t sem_value[AV_NUM_DATA_POINTERS]
Up to date semaphore value at which each image becomes accessible.
Definition: hwcontext_vulkan.h:256
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:270
desc
const char * desc
Definition: libsvtav1.c:83
AVVulkanDeviceContext::enabled_dev_extensions
const char *const * enabled_dev_extensions
Enabled device extensions.
Definition: hwcontext_vulkan.h:96
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
flush
void(* flush)(AVBSFContext *ctx)
Definition: dts2pts_bsf.c:367
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AV_HWFRAME_MAP_OVERWRITE
@ AV_HWFRAME_MAP_OVERWRITE
The mapped frame will be overwritten completely in subsequent operations, so the current frame data n...
Definition: hwcontext.h:534
AV_HWFRAME_MAP_WRITE
@ AV_HWFRAME_MAP_WRITE
The mapping must be writeable.
Definition: hwcontext.h:528
FFVulkanExtensions
FFVulkanExtensions
Definition: vulkan_functions.h:29
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
AVVulkanDeviceContext::act_dev
VkDevice act_dev
Active device.
Definition: hwcontext_vulkan.h:68
hwcontext_internal.h
map
const VDPAUPixFmtMap * map
Definition: hwcontext_vdpau.c:71
AVVulkanDeviceContext::nb_enabled_inst_extensions
int nb_enabled_inst_extensions
Definition: hwcontext_vulkan.h:86
av_free
#define av_free(p)
Definition: tableprint_vlc.h:33
AVDictionaryEntry
Definition: dict.h:89
PREP_MODE_EXTERNAL_EXPORT
@ PREP_MODE_EXTERNAL_EXPORT
Definition: hwcontext_vulkan.c:1905
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
VulkanDevicePriv::mprops
VkPhysicalDeviceMemoryProperties mprops
Definition: hwcontext_vulkan.c:86
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
FF_VK_EXT_EXTERNAL_FD_SEM
@ FF_VK_EXT_EXTERNAL_FD_SEM
Definition: vulkan_functions.h:33
vulkan_device_create_internal
static int vulkan_device_create_internal(AVHWDeviceContext *ctx, VulkanDeviceSelection *dev_select, AVDictionary *opts, int flags)
Definition: hwcontext_vulkan.c:1310
AVVulkanDeviceContext::nb_tx_queues
int nb_tx_queues
Definition: hwcontext_vulkan.h:114
vk_dev_type
static const char * vk_dev_type(enum VkPhysicalDeviceType type)
Definition: hwcontext_vulkan.c:762
imgutils.h
AV_PIX_FMT_XV36
#define AV_PIX_FMT_XV36
Definition: pixfmt.h:515
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
hwcontext.h
ImageBuffer::mapped_mem
int mapped_mem
Definition: hwcontext_vulkan.c:3473
AVDRMPlaneDescriptor::pitch
ptrdiff_t pitch
Pitch (linesize) of this plane.
Definition: hwcontext_drm.h:87
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:375
VulkanQueueCtx::queue
VkQueue queue
Definition: hwcontext_vulkan.c:62
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HWContextType
Definition: hwcontext_internal.h:29
VulkanDevicePriv::contiguous_planes
int contiguous_planes
Definition: hwcontext_vulkan.c:107
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
h
h
Definition: vp9dsp_template.c:2038
pixfmt
enum AVPixelFormat pixfmt
Definition: hwcontext_vulkan.c:166
AVVulkanDeviceContext::device_features
VkPhysicalDeviceFeatures2 device_features
This structure should be set to the set of features that present and enabled during device creation.
Definition: hwcontext_vulkan.h:76
AVDictionaryEntry::value
char * value
Definition: dict.h:91
avstring.h
AVDRMDeviceContext
DRM device.
Definition: hwcontext_drm.h:157
AV_PIX_FMT_VUYX
@ AV_PIX_FMT_VUYX
packed VUYX 4:4:4, 32bpp, Variant of VUYA where alpha channel is left undefined
Definition: pixfmt.h:403
ADD_VAL_TO_LIST
#define ADD_VAL_TO_LIST(list, count, val)
Definition: hwcontext_vulkan.c:144
AVDRMFrameDescriptor::nb_objects
int nb_objects
Number of DRM objects making up this frame.
Definition: hwcontext_drm.h:137
AVVulkanDeviceContext::queue_family_encode_index
int queue_family_encode_index
Queue family index for video encode ops, and the amount of queues enabled.
Definition: hwcontext_vulkan.h:128
HWMapDescriptor
Definition: hwcontext_internal.h:132
FFVulkanFunctions
Definition: vulkan_functions.h:175
VulkanFramesPriv::modifier_info
VkImageDrmFormatModifierListCreateInfoEXT * modifier_info
Definition: hwcontext_vulkan.c:125
VulkanDeviceSelection::name
const char * name
Definition: hwcontext_vulkan.c:756
AV_HWDEVICE_TYPE_DRM
@ AV_HWDEVICE_TYPE_DRM
Definition: hwcontext.h:36
AV_PIX_FMT_YUVA422P
@ AV_PIX_FMT_YUVA422P
planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
Definition: pixfmt.h:166
w32dlfcn.h
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2808