#elif CONFIG_LIBSHADERC
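
/* Generic macro for creating contexts which need to keep their addresses
 * stable as more are created; instantiating it with the samplers array, for
 * instance, defines a create_sampler() that appends to s->samplers. */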
#define FN_CREATING(ctx, type, shortname, array, num)                          \
static av_always_inline type *create_ ##shortname(ctx *dctx)                   \
{                                                                              \
    type **array, *sctx = av_mallocz(sizeof(*sctx));                           \
    if (!sctx)                                                                 \
        return NULL;                                                           \
    array = av_realloc_array(dctx->array, sizeof(*dctx->array), dctx->num + 1);\
    if (!array) {                                                              \
        av_free(sctx);                                                         \
        return NULL;                                                           \
    }                                                                          \
    dctx->array = array;                                                       \
    dctx->array[dctx->num++] = sctx;                                           \
    return sctx;                                                               \
}
    .r = VK_COMPONENT_SWIZZLE_IDENTITY,
    .g = VK_COMPONENT_SWIZZLE_IDENTITY,
    .b = VK_COMPONENT_SWIZZLE_IDENTITY,
    .a = VK_COMPONENT_SWIZZLE_IDENTITY,
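
/* Converts return values to strings */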
#define CASE(VAL) case VAL: return #VAL
    CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
    CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
    CASE(VK_ERROR_INITIALIZATION_FAILED);
    CASE(VK_ERROR_DEVICE_LOST);
    CASE(VK_ERROR_MEMORY_MAP_FAILED);
    CASE(VK_ERROR_LAYER_NOT_PRESENT);
    CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
    CASE(VK_ERROR_FEATURE_NOT_PRESENT);
    CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
    CASE(VK_ERROR_TOO_MANY_OBJECTS);
    CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
    CASE(VK_ERROR_FRAGMENTED_POOL);
    CASE(VK_ERROR_SURFACE_LOST_KHR);
    CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
    CASE(VK_SUBOPTIMAL_KHR);
    CASE(VK_ERROR_OUT_OF_DATE_KHR);
    CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
    CASE(VK_ERROR_VALIDATION_FAILED_EXT);
    CASE(VK_ERROR_INVALID_SHADER_NV);
    CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
    CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
    CASE(VK_ERROR_NOT_PERMITTED_EXT);
    default:
        return "Unknown error";
                   VkQueueFlagBits dev_family, int nb_queues)
    case VK_QUEUE_GRAPHICS_BIT:
    case VK_QUEUE_COMPUTE_BIT:
    case VK_QUEUE_TRANSFER_BIT:
    case VK_QUEUE_VIDEO_ENCODE_BIT_KHR:
    case VK_QUEUE_VIDEO_DECODE_BIT_KHR:
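
/* Allocates device memory matching the given requirements and property
 * flags, and reports back the flags of the memory type actually chosen. */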
                    VkMemoryPropertyFlagBits req_flags, void *alloc_extension,
                    VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
    VkMemoryAllocateInfo alloc_info = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = alloc_extension,
    /* Align the size so the whole allocation stays mappable */
    if (req_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
        req->size = FFALIGN(req->size, s->props.limits.minMemoryMapAlignment);

    alloc_info.allocationSize = req->size;
    /* Find a memory type allowed by the requirements which has all the
     * requested property flags */
    for (int i = 0; i < s->mprops.memoryTypeCount; i++) {
        if (!(req->memoryTypeBits & (1 << i)))
            continue;
        if ((s->mprops.memoryTypes[i].propertyFlags & req_flags) != req_flags)
            continue;
    alloc_info.memoryTypeIndex = index;
    ret = vk->AllocateMemory(s->hwctx->act_dev, &alloc_info,
                             s->hwctx->alloc, mem);
    if (ret != VK_SUCCESS) {
    *mem_flags |= s->mprops.memoryTypes[index].propertyFlags;
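
/* Creates a VkBuffer and backs it with freshly allocated memory, using a
 * dedicated allocation when the implementation prefers or requires one. */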
                     VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags)
    VkBufferCreateInfo buf_spawn = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
    VkBufferMemoryRequirementsInfo2 req_desc = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
    };
    VkMemoryDedicatedAllocateInfo ded_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
        .pNext = NULL,
    };
    VkMemoryDedicatedRequirements ded_req = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
    };
    VkMemoryRequirements2 req = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
        .pNext = &ded_req,
    };
    ret = vk->CreateBuffer(s->hwctx->act_dev, &buf_spawn, NULL, &buf->buf);
    if (ret != VK_SUCCESS) {
    req_desc.buffer = buf->buf;

    vk->GetBufferMemoryRequirements2(s->hwctx->act_dev, &req_desc, &req);
    /* Use a dedicated allocation if the implementation prefers or requires one */
    use_ded_mem = ded_req.prefersDedicatedAllocation |
                  ded_req.requiresDedicatedAllocation;
    ded_alloc.buffer = buf->buf;

    err = ff_vk_alloc_mem(avctx, &req.memoryRequirements, flags,
                          use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
                          &buf->flags, &buf->mem);
    ret = vk->BindBufferMemory(s->hwctx->act_dev, buf->buf, buf->mem, 0);
    if (ret != VK_SUCCESS) {
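
/* Maps the buffers' memory to host-visible pointers, optionally invalidating
 * non-coherent ranges afterwards so host reads see device writes. */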
                      int nb_buffers, int invalidate)
    VkMappedMemoryRange *inval_list = NULL;
    /* Map all the buffers' memory */
    for (int i = 0; i < nb_buffers; i++) {
        ret = vk->MapMemory(s->hwctx->act_dev, buf[i].mem, 0,
                            VK_WHOLE_SIZE, 0, (void **)&mem[i]);
        if (ret != VK_SUCCESS) {
    for (int i = 0; i < nb_buffers; i++) {
        const VkMappedMemoryRange ival_buf = {
            .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = buf[i].mem,
            .size   = VK_WHOLE_SIZE,
        };
        /* Coherent memory does not need an explicit invalidation */
        if (buf[i].flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
            continue;
        inval_list = av_fast_realloc(s->scratch, &s->scratch_size,
                                     (++inval_count)*sizeof(*inval_list));
        if (!inval_list)
            return AVERROR(ENOMEM);
        s->scratch = inval_list;
        inval_list[inval_count - 1] = ival_buf;
    ret = vk->InvalidateMappedMemoryRanges(s->hwctx->act_dev, inval_count,
                                           inval_list);
    if (ret != VK_SUCCESS) {
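
/* Unmaps the buffers' memory, flushing non-coherent ranges first when the
 * host has written to them. */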
    VkMappedMemoryRange *flush_list = NULL;
    for (int i = 0; i < nb_buffers; i++) {
        const VkMappedMemoryRange flush_buf = {
            .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = buf[i].mem,
            .size   = VK_WHOLE_SIZE,
        };
        /* Coherent memory does not need an explicit flush */
        if (buf[i].flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
            continue;
        flush_list = av_fast_realloc(s->scratch, &s->scratch_size,
                                     (++flush_count)*sizeof(*flush_list));
        if (!flush_list)
            return AVERROR(ENOMEM);
        s->scratch = flush_list;
        flush_list[flush_count - 1] = flush_buf;
    ret = vk->FlushMappedMemoryRanges(s->hwctx->act_dev, flush_count,
                                      flush_list);
    if (ret != VK_SUCCESS) {
    for (int i = 0; i < nb_buffers; i++)
        vk->UnmapMemory(s->hwctx->act_dev, buf[i].mem);
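
/* Destroys a buffer and frees its memory after waiting for the device to
 * go idle. */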
    if (!buf || !s->hwctx)
        return;

    vk->DeviceWaitIdle(s->hwctx->act_dev);
    if (buf->buf != VK_NULL_HANDLE)
        vk->DestroyBuffer(s->hwctx->act_dev, buf->buf, s->hwctx->alloc);
    if (buf->mem != VK_NULL_HANDLE)
        vk->FreeMemory(s->hwctx->act_dev, buf->mem, s->hwctx->alloc);
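
/* Registers a push constant range for the given shader stage(s). */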
                          VkShaderStageFlagBits stage)

    VkPushConstantRange *pc;

    memset(pc, 0, sizeof(*pc));

    pc->stageFlags = stage;
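
/* Creates an execution context: a command pool with one primary command
 * buffer and one fence per queue. */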
        .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        .queueFamilyIndex = qf->queue_family,
    };
    VkCommandBufferAllocateInfo cbuf_create = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = qf->nb_queues,
    };
                                 s->hwctx->alloc, &e->pool);
    if (ret != VK_SUCCESS) {
    cbuf_create.commandPool = e->pool;
    ret = vk->AllocateCommandBuffers(s->hwctx->act_dev, &cbuf_create, e->bufs);
    if (ret != VK_SUCCESS) {
    for (int i = 0; i < qf->nb_queues; i++) {
        /* Command buffers are cycled over the queues that actually exist */
        vk->GetDeviceQueue(s->hwctx->act_dev, qf->queue_family,
                           i % qf->actual_queues, &q->queue);
    VkCommandBufferBeginInfo cmd_start = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };
        VkFenceCreateInfo fence_spawn = {
            .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        };
        ret = vk->CreateFence(s->hwctx->act_dev, &fence_spawn, s->hwctx->alloc,
                              &q->fence);
        if (ret != VK_SUCCESS) {
        /* Wait for the fence from the previous submission on this queue */
        vk->WaitForFences(s->hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vk->ResetFences(s->hwctx->act_dev, 1, &q->fence);
    if (ret != VK_SUCCESS) {
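
/* Queues a frame dependency and submits the command buffer, signalling the
 * execution context's semaphores on completion. */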
                            VkPipelineStageFlagBits in_wait_dst_flag)
                           (q->nb_frame_deps + 1) * sizeof(*dst));
    if (!q->frame_deps[q->nb_frame_deps]) {
    VkTimelineSemaphoreSubmitInfo s_timeline_sem_info = {
        .sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,

    VkSubmitInfo s_info = {
        .sType              = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext              = &s_timeline_sem_info,
        .commandBufferCount = 1,
        .pSignalSemaphores  = e->sem_sig,
    if (ret != VK_SUCCESS) {

    if (ret != VK_SUCCESS) {
    if (!deps || !nb_deps)
    for (int i = 0; i < nb_deps; i++) {
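
/* Creates an immutable sampler, clamped to edge, with either nearest or
 * linear filtering. */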
                          int unnorm_coords, VkFilter filt)
    VkSamplerCreateInfo sampler_info = {
        .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
        .magFilter = filt,
        .minFilter = sampler_info.magFilter,
        .mipmapMode = unnorm_coords ? VK_SAMPLER_MIPMAP_MODE_NEAREST :
                                      VK_SAMPLER_MIPMAP_MODE_LINEAR,
        .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
        .addressModeV = sampler_info.addressModeU,
        .addressModeW = sampler_info.addressModeU,
        .anisotropyEnable = VK_FALSE,
        .compareOp = VK_COMPARE_OP_NEVER,
        .borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
        .unnormalizedCoordinates = unnorm_coords,
    };
    ret = vk->CreateSampler(s->hwctx->act_dev, &sampler_info,
                            s->hwctx->alloc, &sctx->sampler[0]);
    if (ret != VK_SUCCESS) {
    /* Plane samplers are identical; reuse the first one */
    for (int i = 1; i < 4; i++)
        sctx->sampler[i] = sctx->sampler[0];
    const int high = desc->comp[0].depth > 8;
    return high ? "rgba16f" : "rgba8";
    vk->DestroyImageView(s->hwctx->act_dev, iv->view, s->hwctx->alloc);
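
/* Creates a 2D image view with the given format and component mapping. */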
                          VkImageView *v, VkImage img, VkFormat fmt,
                          const VkComponentMapping map)
    VkImageViewCreateInfo imgview_spawn = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        .viewType = VK_IMAGE_VIEW_TYPE_2D,
        .subresourceRange = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
    VkResult ret = vk->CreateImageView(s->hwctx->act_dev, &imgview_spawn,
                                       s->hwctx->alloc, &iv->view);
    if (ret != VK_SUCCESS) {
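
/* Registers a shader with a pipeline and seeds its GLSL source with common
 * definitions. */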
                         VkShaderStageFlags stage)

    shd->shader.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shd->shader.stage = stage;
    GLSLC(0, #define IS_WITHIN(v1, v2) ((v1.x < v2.x) && (v1.y < v2.y)) );
823 "local_size_y = %i, local_size_z = %i) in;\n\n",
    const char *p = shd->src.str;
    const char *start = p;
    for (int i = 0; i < strlen(p); i++) {
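
/* Compiles the shader to SPIR-V and creates the Vulkan shader module. */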
                         const char *entrypoint)
    VkShaderModuleCreateInfo shader_create;
    shd->shader.pName = entrypoint;
    if (!s->spirv_compiler) {
#if CONFIG_LIBGLSLANG
        s->spirv_compiler = ff_vk_glslang_init();
#elif CONFIG_LIBSHADERC
        s->spirv_compiler = ff_vk_shaderc_init();
#endif
        if (!s->spirv_compiler)
    err = s->spirv_compiler->compile_shader(s->spirv_compiler, s, shd, &spirv,
                                            &spirv_size, entrypoint, &priv);
           shd->name, spirv_size);
    shader_create.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    shader_create.pNext    = NULL;
    shader_create.codeSize = spirv_size;
    shader_create.flags    = 0;
    shader_create.pCode    = (void *)spirv;
    ret = vk->CreateShaderModule(s->hwctx->act_dev, &shader_create, NULL,
                                 &shd->shader.module);

    s->spirv_compiler->free_shader(s->spirv_compiler, &priv);
    if (ret != VK_SUCCESS) {
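
/* Descriptor type properties: the size of the struct the updater writes, the
 * GLSL type name (NULL for uniform buffers, whose contents are declared
 * inline), and flags for uniformity, memory qualifiers, dimensionality and
 * buffer contents. */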
    [VK_DESCRIPTOR_TYPE_SAMPLER]                = { sizeof(VkDescriptorImageInfo),  "sampler",       1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE]          = { sizeof(VkDescriptorImageInfo),  "texture",       1, 0, 1, 0, },
    [VK_DESCRIPTOR_TYPE_STORAGE_IMAGE]          = { sizeof(VkDescriptorImageInfo),  "image",         1, 1, 1, 0, },
    [VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT]       = { sizeof(VkDescriptorImageInfo),  "subpassInput",  1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] = { sizeof(VkDescriptorImageInfo),  "sampler",       1, 0, 1, 0, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER]         = { sizeof(VkDescriptorBufferInfo), NULL,            1, 0, 0, 1, },
    [VK_DESCRIPTOR_TYPE_STORAGE_BUFFER]         = { sizeof(VkDescriptorBufferInfo), "buffer",        0, 1, 0, 1, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] = { sizeof(VkDescriptorBufferInfo), NULL,            1, 0, 0, 1, },
    [VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] = { sizeof(VkDescriptorBufferInfo), "buffer",        0, 1, 0, 1, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER]   = { sizeof(VkBufferView),           "samplerBuffer", 1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER]   = { sizeof(VkBufferView),           "imageBuffer",   1, 0, 0, 0, },
                          int num, int only_print_to_shader)
    VkDescriptorSetLayout *layout;
    if (only_print_to_shader)
    VkDescriptorSetLayoutCreateInfo desc_create_layout = { 0 };
    VkDescriptorSetLayoutBinding *desc_binding;

    desc_binding = av_mallocz(sizeof(*desc_binding)*num);
    for (int i = 0; i < num; i++) {
        desc_binding[i].binding            = i;
        desc_binding[i].descriptorType     = desc[i].type;
        desc_binding[i].descriptorCount    = FFMAX(desc[i].elems, 1);
        desc_binding[i].stageFlags         = desc[i].stages;
        desc_binding[i].pImmutableSamplers = desc[i].sampler ?
                                             desc[i].sampler->sampler :
                                             NULL;
    desc_create_layout.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    desc_create_layout.pBindings    = desc_binding;
    desc_create_layout.bindingCount = num;
    ret = vk->CreateDescriptorSetLayout(s->hwctx->act_dev, &desc_create_layout,
                                        s->hwctx->alloc, layout);
    if (ret != VK_SUCCESS) {
    for (int i = 0; i < num; i++) {
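
/* Builds a descriptor update template so a whole set can later be updated
 * from the updater structs with a single call. */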
    VkDescriptorUpdateTemplateCreateInfo *dt;
    VkDescriptorUpdateTemplateEntry *des_entries;
    des_entries = av_mallocz(num*sizeof(VkDescriptorUpdateTemplateEntry));
    for (int i = 0; i < num; i++) {
        des_entries[i].dstBinding      = i;
        des_entries[i].descriptorType  = desc[i].type;
        des_entries[i].descriptorCount = FFMAX(desc[i].elems, 1);
        des_entries[i].dstArrayElement = 0;
        des_entries[i].offset          = ((uint8_t *)desc[i].updater) - (uint8_t *)s;
    dt[i].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
    dt[i].templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
    dt[i].descriptorSetLayout = layout[i];
    dt[i].pDescriptorUpdateEntries = des_entries;
    dt[i].descriptorUpdateEntryCount = num;
    for (int i = 0; i < num; i++) {
        if (desc[i].mem_layout)

        else if (desc[i].elems > 0)
    vk->UpdateDescriptorSetWithTemplate(s->hwctx->act_dev,

    vk->UpdateDescriptorSetWithTemplate(s->hwctx->act_dev,
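
/* Finalizes the pipeline: creates the descriptor pool, allocates the sets,
 * then creates the pipeline layout and the descriptor update templates. */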
                            VkShaderStageFlagBits stage, int offset,
    VkDescriptorPoolCreateInfo pool_create_info = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
    ret = vk->CreateDescriptorPool(s->hwctx->act_dev, &pool_create_info,
                                   s->hwctx->alloc, &pl->desc_pool);
    if (ret != VK_SUCCESS) {
    VkDescriptorSetAllocateInfo alloc_info = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
    ret = vk->AllocateDescriptorSets(s->hwctx->act_dev, &alloc_info,
                                     pl->desc_set);
    if (ret != VK_SUCCESS) {
    VkPipelineLayoutCreateInfo spawn_pipeline_layout = {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
        .pSetLayouts = (VkDescriptorSetLayout *)pl->desc_staging,
    ret = vk->CreatePipelineLayout(s->hwctx->act_dev, &spawn_pipeline_layout,
                                   s->hwctx->alloc, &pl->pipeline_layout);
    if (ret != VK_SUCCESS) {
    VkDescriptorUpdateTemplateCreateInfo *dt;
        ret = vk->CreateDescriptorUpdateTemplate(s->hwctx->act_dev,
                                                 dt, s->hwctx->alloc,
                                                 &pl->desc_template[i]);
        if (ret != VK_SUCCESS) {
        av_free((void *)dt->pDescriptorUpdateEntries);
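
/* Creates a compute pipeline from the pipeline's compute shader stage. */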
    VkComputePipelineCreateInfo pipe = {
        .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
        if (pl->shaders[i]->shader.stage & VK_SHADER_STAGE_COMPUTE_BIT) {
    ret = vk->CreateComputePipelines(s->hwctx->act_dev, VK_NULL_HANDLE, 1, &pipe,
                                     s->hwctx->alloc, &pl->pipeline);
    if (ret != VK_SUCCESS) {
    pl->bind_point = VK_PIPELINE_BIND_POINT_COMPUTE;
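
/* Frees an execution context: waits on all fences, then destroys the queues'
 * resources and the command pool. */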
        vk->WaitForFences(s->hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vk->ResetFences(s->hwctx->act_dev, 1, &q->fence);

        vk->DestroyFence(s->hwctx->act_dev, q->fence, s->hwctx->alloc);
    vk->DestroyCommandPool(s->hwctx->act_dev, e->pool, s->hwctx->alloc);
        vk->DestroyShaderModule(s->hwctx->act_dev, shd->shader.module,
                                s->hwctx->alloc);
    vk->DestroyPipeline(s->hwctx->act_dev, pl->pipeline, s->hwctx->alloc);
        vk->DestroyDescriptorUpdateTemplate(s->hwctx->act_dev, pl->desc_template[i],
                                            s->hwctx->alloc);
        vk->DestroyDescriptorSetLayout(s->hwctx->act_dev, pl->desc_layout[i],
                                       s->hwctx->alloc);
        vk->DestroyDescriptorPool(s->hwctx->act_dev, pl->desc_pool,
                                  s->hwctx->alloc);
    av_free((void *)dt->pDescriptorUpdateEntries);
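
/* Uninitializes the filter context: frees the SPIR-V compiler, execution
 * contexts, samplers, pipelines and the scratch buffer. */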
    if (s->spirv_compiler)
        s->spirv_compiler->uninit(&s->spirv_compiler);
    for (int i = 0; i < s->exec_ctx_num; i++)
    for (int i = 0; i < s->samplers_num; i++) {
        vk->DestroySampler(s->hwctx->act_dev, s->samplers[i]->sampler[0],
                           s->hwctx->alloc);
    for (int i = 0; i < s->pipelines_num; i++)
    s->scratch_size = 0;