Go to the documentation of this file.
19 #define VK_NO_PROTOTYPES
20 #define VK_ENABLE_BETA_EXTENSIONS
25 #if (SDL_VERSION_ATLEAST(2, 0, 6) && CONFIG_LIBPLACEBO)
27 #include <libplacebo/config.h>
28 #define HAVE_VULKAN_RENDERER (PL_API_VER >= 278)
30 #define HAVE_VULKAN_RENDERER 0
33 #if HAVE_VULKAN_RENDERER
35 #if defined(_WIN32) && !defined(VK_USE_PLATFORM_WIN32_KHR)
36 #define VK_USE_PLATFORM_WIN32_KHR
39 #include <libplacebo/vulkan.h>
40 #include <libplacebo/utils/frame_queue.h>
41 #include <libplacebo/utils/libav.h>
42 #include <SDL_vulkan.h>
63 #if HAVE_VULKAN_RENDERER
65 typedef struct RendererContext {
69 pl_vk_inst placebo_instance;
70 pl_vulkan placebo_vulkan;
71 pl_swapchain swapchain;
72 VkSurfaceKHR vk_surface;
83 PFN_vkGetInstanceProcAddr get_proc_addr;
90 static void vk_log_cb(
void *log_priv,
enum pl_log_level
level,
93 static const int level_map[] = {
110 VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME,
111 VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,
112 VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
113 VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME,
114 VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME,
115 VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME,
116 VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME,
119 VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
120 VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
121 VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,
122 VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
123 VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,
125 VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
126 VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
130 VK_KHR_VIDEO_QUEUE_EXTENSION_NAME,
131 VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME,
132 VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME,
133 VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME,
134 "VK_MESA_video_decode_av1",
144 static void hwctx_lock_queue(
void *priv, uint32_t qf, uint32_t qidx)
151 static void hwctx_unlock_queue(
void *priv, uint32_t qf, uint32_t qidx)
158 static int add_instance_extension(
const char **ext,
unsigned num_ext,
162 const char *inst_ext_key =
"instance_extensions";
165 char *ext_list =
NULL;
169 for (
int i = 0;
i < num_ext;
i++) {
189 static int add_device_extension(
const AVDictionary *opt,
192 const char *dev_ext_key =
"device_extensions";
195 char *ext_list =
NULL;
199 av_bprintf(&buf,
"%s", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
200 for (
int i = 0;
i < pl_vulkan_num_recommended_extensions;
i++)
201 av_bprintf(&buf,
"+%s", pl_vulkan_recommended_extensions[
i]);
213 static const char *select_device(
const AVDictionary *opt)
224 const char **ext,
unsigned num_ext,
227 RendererContext *
ctx = (RendererContext *)
renderer;
233 ret = add_instance_extension(ext, num_ext, opt, &dict);
236 ret = add_device_extension(opt, &dict);
243 select_device(opt), dict, 0);
255 "hwdevice and SDL use different get_proc_addr. "
256 "Try -vulkan_params create_by_placebo=1\n");
262 ctx->placebo_vulkan = pl_vulkan_import(
ctx->vk_log,
263 pl_vulkan_import_params(
264 .instance = hwctx->
inst,
271 .lock_queue = hwctx_lock_queue,
272 .unlock_queue = hwctx_unlock_queue,
275 .index = hwctx->queue_family_index,
276 .count = hwctx->nb_graphics_queues,
279 .index = hwctx->queue_family_comp_index,
280 .count = hwctx->nb_comp_queues,
283 .index = hwctx->queue_family_tx_index,
284 .count = hwctx->nb_tx_queues,
287 if (!
ctx->placebo_vulkan)
294 uint32_t queue_family, uint32_t
index)
297 pl_vulkan vk =
ctx->placebo_vulkan;
298 vk->lock_queue(vk, queue_family,
index);
302 uint32_t queue_family,
306 pl_vulkan vk =
ctx->placebo_vulkan;
307 vk->unlock_queue(vk, queue_family,
index);
312 RendererContext *
ctx = (RendererContext *)
renderer;
313 VkQueueFamilyProperties *queue_family_prop =
NULL;
314 uint32_t num_queue_family_prop = 0;
315 PFN_vkGetPhysicalDeviceQueueFamilyProperties get_queue_family_prop;
316 PFN_vkGetInstanceProcAddr get_proc_addr =
ctx->get_proc_addr;
320 get_queue_family_prop = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)
321 get_proc_addr(
ctx->placebo_instance->instance,
322 "vkGetPhysicalDeviceQueueFamilyProperties");
323 get_queue_family_prop(
ctx->placebo_vulkan->phys_device,
324 &num_queue_family_prop,
NULL);
325 if (!num_queue_family_prop)
328 queue_family_prop =
av_calloc(num_queue_family_prop,
329 sizeof(*queue_family_prop));
330 if (!queue_family_prop)
333 get_queue_family_prop(
ctx->placebo_vulkan->phys_device,
334 &num_queue_family_prop,
337 for (
int i = 0;
i < num_queue_family_prop;
i++) {
338 if (queue_family_prop[
i].queueFlags & VK_QUEUE_VIDEO_DECODE_BIT_KHR) {
340 *count = queue_family_prop[
i].queueCount;
350 const char **ext,
unsigned num_ext,
353 RendererContext *
ctx = (RendererContext *)
renderer;
360 ctx->get_proc_addr = SDL_Vulkan_GetVkGetInstanceProcAddr();
362 ctx->placebo_instance = pl_vk_inst_create(
ctx->vk_log, pl_vk_inst_params(
363 .get_proc_addr =
ctx->get_proc_addr,
364 .
debug = enable_debug(opt),
366 .num_extensions = num_ext
368 if (!
ctx->placebo_instance) {
371 ctx->inst =
ctx->placebo_instance->instance;
373 ctx->placebo_vulkan = pl_vulkan_create(
ctx->vk_log, pl_vulkan_params(
374 .instance =
ctx->placebo_instance->instance,
375 .get_proc_addr =
ctx->placebo_instance->get_proc_addr,
376 .surface =
ctx->vk_surface,
377 .allow_software =
false,
380 .extra_queues = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
381 .device_name = select_device(opt),
383 if (!
ctx->placebo_vulkan)
386 if (!
ctx->hw_device_ref) {
393 vk_dev_ctx = device_ctx->
hwctx;
399 vk_dev_ctx->
inst =
ctx->placebo_instance->instance;
400 vk_dev_ctx->
phys_dev =
ctx->placebo_vulkan->phys_device;
401 vk_dev_ctx->
act_dev =
ctx->placebo_vulkan->device;
437 unsigned num_ext = 0;
438 const char **ext =
NULL;
440 struct pl_log_params vk_log_params = {
442 .log_level = PL_LOG_DEBUG,
445 RendererContext *
ctx = (RendererContext *)
renderer;
448 ctx->vk_log = pl_log_create(PL_API_VER, &vk_log_params);
450 if (!SDL_Vulkan_GetInstanceExtensions(
window, &num_ext,
NULL)) {
462 SDL_Vulkan_GetInstanceExtensions(
window, &num_ext, ext);
466 ret = create_vk_by_placebo(
renderer, ext, num_ext, opt);
468 ret = create_vk_by_hwcontext(
renderer, ext, num_ext, opt);
472 if (!SDL_Vulkan_CreateSurface(
window,
ctx->inst, &
ctx->vk_surface)) {
477 ctx->swapchain = pl_vulkan_create_swapchain(
479 pl_vulkan_swapchain_params(
480 .surface =
ctx->vk_surface,
481 .present_mode = VK_PRESENT_MODE_FIFO_KHR));
482 if (!
ctx->swapchain) {
487 SDL_Vulkan_GetDrawableSize(
window, &
w, &
h);
488 pl_swapchain_resize(
ctx->swapchain, &
w, &
h);
490 ctx->renderer = pl_renderer_create(
ctx->vk_log,
ctx->placebo_vulkan->gpu);
491 if (!
ctx->renderer) {
497 if (!
ctx->vk_frame) {
511 RendererContext *
ctx = (RendererContext *)
renderer;
513 *dev =
ctx->hw_device_ref;
519 RendererContext *
ctx = (RendererContext *)
renderer;
521 frame->hw_frames_ctx->data;
526 if (
ctx->hw_frame_ref) {
529 if (hw_frame->width ==
frame->width &&
530 hw_frame->height ==
frame->height &&
531 hw_frame->sw_format == src_hw_frame->
sw_format)
537 if (!
ctx->constraints) {
540 if (!
ctx->constraints)
546 if ((
ctx->constraints->max_width &&
547 ctx->constraints->max_width <
frame->width) ||
548 (
ctx->constraints->max_height &&
549 ctx->constraints->max_height <
frame->height) ||
550 (
ctx->constraints->min_width &&
551 ctx->constraints->min_width >
frame->width) ||
552 (
ctx->constraints->min_height &&
553 ctx->constraints->min_height >
frame->height))
556 if (
ctx->constraints->valid_sw_formats) {
559 if (*sw_formats == src_hw_frame->
sw_format)
568 if (!
ctx->hw_frame_ref)
573 hw_frame->sw_format = src_hw_frame->
sw_format;
574 hw_frame->width =
frame->width;
575 hw_frame->height =
frame->height;
578 vk_frame_ctx = hw_frame->hwctx;
591 &
ctx->transfer_formats, 0);
596 static inline int check_hw_transfer(RendererContext *
ctx,
AVFrame *
frame)
598 if (!
ctx->hw_frame_ref || !
ctx->transfer_formats)
602 if (
ctx->transfer_formats[
i] ==
frame->format)
608 static inline int move_to_output_frame(RendererContext *
ctx,
AVFrame *
frame)
620 RendererContext *
ctx = (RendererContext *)
renderer;
623 if (use_hw_frame && !
ctx->hw_frame_ref)
634 return move_to_output_frame(
ctx,
frame);
643 RendererContext *
ctx = (RendererContext *)
renderer;
646 if (use_hw_frame && !check_hw_transfer(
ctx,
frame))
654 return move_to_output_frame(
ctx,
frame);
666 if (!
frame->hw_frames_ctx)
676 for (
int use_hw = 1; use_hw >=0; use_hw--) {
695 struct pl_swapchain_frame swap_frame = {0};
696 struct pl_frame pl_frame = {0};
697 struct pl_frame target = {0};
698 RendererContext *
ctx = (RendererContext *)
renderer;
700 struct pl_color_space hint = {0};
706 if (!pl_map_avframe_ex(
ctx->placebo_vulkan->gpu, &pl_frame, pl_avframe_params(
713 pl_color_space_from_avframe(&hint,
frame);
714 pl_swapchain_colorspace_hint(
ctx->swapchain, &hint);
715 if (!pl_swapchain_start_frame(
ctx->swapchain, &swap_frame)) {
721 pl_frame_from_swapchain(&target, &swap_frame);
722 if (!pl_render_image(
ctx->renderer, &pl_frame, &target,
723 &pl_render_default_params)) {
729 if (!pl_swapchain_submit_frame(
ctx->swapchain)) {
734 pl_swapchain_swap_buffers(
ctx->swapchain);
737 pl_unmap_avframe(
ctx->placebo_vulkan->gpu, &pl_frame);
743 RendererContext *
ctx = (RendererContext *)
renderer;
752 RendererContext *
ctx = (RendererContext *)
renderer;
753 PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;
760 if (
ctx->placebo_vulkan) {
762 pl_tex_destroy(
ctx->placebo_vulkan->gpu, &
ctx->tex[
i]);
763 pl_renderer_destroy(&
ctx->renderer);
764 pl_swapchain_destroy(&
ctx->swapchain);
765 pl_vulkan_destroy(&
ctx->placebo_vulkan);
768 if (
ctx->vk_surface) {
769 vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)
770 ctx->get_proc_addr(
ctx->inst,
"vkDestroySurfaceKHR");
771 vkDestroySurfaceKHR(
ctx->inst,
ctx->vk_surface,
NULL);
772 ctx->vk_surface = VK_NULL_HANDLE;
776 pl_vk_inst_destroy(&
ctx->placebo_instance);
778 pl_log_destroy(&
ctx->vk_log);
781 static const AVClass vulkan_renderer_class = {
796 renderer->class = &vulkan_renderer_class;
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
int(* create)(VkRenderer *renderer, SDL_Window *window, AVDictionary *dict)
VkPhysicalDevice phys_dev
Physical device.
#define AV_LOG_WARNING
Something somehow does not look correct.
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
AVPixelFormat
Pixel format.
static int convert_frame(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Convert a frame from linear RGB to logspace LAB, and accumulate channel totals for each row.
@ AV_VK_FRAME_FLAG_DISABLE_MULTIPLANE
The word "frame" indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Format negotiation: for each input and each output, a filter lists the formats it supports (for video that means pixel formats; for audio, channel layouts and sample formats). These lists are references to shared objects: when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and when a single format is eventually chosen for a link among the remaining ones, all references to the list are updated. That means that if a filter requires its input and output to have the same format among a supported set, all it has to do is use a reference to the same list of formats. query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later; filters with complex requirements can use this to set the formats supported on one link from the format negotiated on another. Frame references: ownership and permissions.
static void destroy(struct ResampleContext **c)
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
#define AV_LOG_QUIET
Print no output.
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
This structure describes decoded (raw) audio or video data.
PFN_vkGetInstanceProcAddr get_proc_addr
Pointer to a vkGetInstanceProcAddr loading function.
static const VulkanOptExtension optional_device_exts[]
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
void * user_opaque
Arbitrary user data, to be used e.g.
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
int vk_renderer_create(VkRenderer *renderer, SDL_Window *window, AVDictionary *opt)
VkInstance inst
Vulkan instance.
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
static bool map_frame(pl_gpu gpu, pl_tex *tex, const struct pl_source_frame *src, struct pl_frame *out)
void vk_renderer_destroy(VkRenderer *renderer)
AVHWFramesConstraints * av_hwdevice_get_hwframe_constraints(AVBufferRef *ref, const void *hwconfig)
Get the constraints on HW frames given a device and the HW-specific configuration to be used with tha...
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
@ AV_PIX_FMT_VULKAN
Vulkan hardware images.
@ AV_HWDEVICE_TYPE_VULKAN
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
static SDL_Window * window
attribute_deprecated int nb_tx_queues
Allocated as AVHWFramesContext.hwctx, used to set pool-specific options.
#define AV_BPRINT_SIZE_AUTOMATIC
attribute_deprecated int queue_family_decode_index
Queue family index for video decode ops, and the amount of queues enabled.
#define AV_DICT_DONT_STRDUP_VAL
Take ownership of a value that's been allocated with av_malloc() or another memory allocation functio...
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
VkRenderer * vk_get_renderer(void)
#define FF_ARRAY_ELEMS(a)
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
Free an AVHWFrameConstraints structure.
static int decode_index(SGAVideoContext *s, AVFrame *frame)
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
int vk_renderer_get_hw_dev(VkRenderer *renderer, AVBufferRef **dev)
int vk_renderer_display(VkRenderer *renderer, AVFrame *frame)
static SDL_Renderer * renderer
Main Vulkan context, allocated as AVHWDeviceContext.hwctx.
#define LIBAVUTIL_VERSION_INT
Describe the class of an AVClass context structure.
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
int nb_enabled_dev_extensions
static struct ResampleContext * create(struct ResampleContext *c, int out_rate, int in_rate, int filter_size, int phase_shift, int linear, double cutoff, enum AVSampleFormat format, enum SwrFilterType filter_type, double kaiser_beta, double precision, int cheby, int exact_rational)
void(* unlock_queue)(struct AVHWDeviceContext *ctx, uint32_t queue_family, uint32_t index)
Similar to lock_queue(), unlocks a queue.
attribute_deprecated int nb_decode_queues
const char * av_default_item_name(void *ptr)
Return the context name.
attribute_deprecated int queue_family_tx_index
Queue family index for transfer operations and the number of queues enabled.
const char *const * enabled_inst_extensions
Enabled instance extensions.
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
int(* get_hw_dev)(VkRenderer *renderer, AVBufferRef **dev)
attribute_deprecated int queue_family_index
Queue family index for graphics operations, and the number of queues enabled for it.
#define AVERROR_EXTERNAL
Generic error in an external library.
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
void(* lock_queue)(struct AVHWDeviceContext *ctx, uint32_t queue_family, uint32_t index)
Locks a queue, preventing other threads from submitting any command buffers to this queue.
#define AV_LOG_INFO
Standard information.
#define i(width, name, range_min, range_max)
attribute_deprecated int queue_family_comp_index
Queue family index for compute operations and the number of queues enabled.
void av_frame_move_ref(AVFrame *dst, AVFrame *src)
Move everything contained in src to dst and reset src.
int vk_renderer_resize(VkRenderer *renderer, int width, int height)
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
void * av_calloc(size_t nmemb, size_t size)
This struct describes a set or pool of "hardware" frames (i.e.
#define AV_LOG_FATAL
Something went wrong and recovery is not possible.
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
If a new input produces new frames, these buffered frames must be flushed immediately; the filter must not call request_frame to get more. It must just process the frame or queue it — the task of requesting more frames is left to the filter's request_frame method or to the application. If a filter has several inputs, it must be ready for frames arriving randomly on any input, so any filter with several inputs will most likely require some kind of queuing mechanism. It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced. request_frame: for filters that do not use the activate callback, this method is called when a frame is wanted on an output. For a source, it should directly call filter_frame on the corresponding output. For a filter, if there are queued frames already, one of them should be pushed; if the filter should request a frame on one of its inputs, it should do so repeatedly until at least one frame has been pushed. Return 0, or at least make progress towards producing a frame.
void(* destroy)(VkRenderer *renderer)
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
void av_bprintf(AVBPrint *buf, const char *fmt,...)
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
int debug
Flags to enable debugging.
attribute_deprecated int nb_graphics_queues
int av_hwframe_transfer_get_formats(AVBufferRef *hwframe_ref, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats, int flags)
Get a list of possible source or target formats usable in av_hwframe_transfer_data().
const char *const * enabled_dev_extensions
Enabled device extensions.
A reference to a data buffer.
VkDevice act_dev
Active device.
int nb_enabled_inst_extensions
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
VkPhysicalDeviceFeatures2 device_features
This structure should be set to the set of features that present and enabled during device creation.
int(* display)(VkRenderer *renderer, AVFrame *frame)
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
int(* resize)(VkRenderer *renderer, int width, int height)
@ AV_HWFRAME_TRANSFER_DIRECTION_TO
Transfer the data to the queried hw frame.
attribute_deprecated int nb_comp_queues