FFmpeg
vf_chromaber_vulkan.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "libavutil/random_seed.h"
20 #include "libavutil/opt.h"
21 #include "vulkan.h"
22 #include "internal.h"
23 
24 #define CGROUPS (int [3]){ 32, 32, 1 }
25 
28 
32 
/* NOTE(review): the doxygen extraction this text came from elided source
 * lines 25-31 here, which held the struct's opening (presumably
 * "typedef struct ChromaticAberrationVulkanContext {" plus the shared
 * VulkanFilterContext / pipeline / exec-context / initialized members
 * referenced below as s->vkctx, s->pl, s->exec, s->initialized) — confirm
 * against the real vf_chromaber_vulkan.c. */
33  /* Shader updators, must be in the main filter struct */
/* Per-plane descriptor updater arrays (up to 3 planes); the descriptor-set
 * machinery keeps pointers to these, so they must outlive init_filter(). */
34  VkDescriptorImageInfo input_images[3];
35  VkDescriptorImageInfo output_images[3];
36 
37  /* Push constants / options */
/* Laid out to match the GLSL push-constant block ("vec2 dist") emitted in
 * init_filter(); uploaded verbatim via ff_vk_update_push_exec(). */
38  struct {
39  float dist[2];
40  } opts;
42 
/* GLSL helper functions, stitched into the compute shader by init_filter()
 * via the C()/GLSL* macros from vulkan.h. Coordinates are normalized to
 * [-1,1] around the image centre before distortion. */
43 static const char distort_chroma_kernel[] = {
/* distort_rgb(): packed-RGB path — shifts R and B in opposite directions
 * along the centre vector by o = p*(dist - 1), leaving G and A unshifted. */
44  C(0, void distort_rgb(ivec2 size, ivec2 pos) )
45  C(0, { )
46  C(1, const vec2 p = ((vec2(pos)/vec2(size)) - 0.5f)*2.0f; )
47  C(1, const vec2 o = p * (dist - 1.0f); )
48  C(0, )
49  C(1, vec4 res; )
50  C(1, res.r = texture(input_img[0], ((p - o)/2.0f) + 0.5f).r; )
51  C(1, res.g = texture(input_img[0], ((p )/2.0f) + 0.5f).g; )
52  C(1, res.b = texture(input_img[0], ((p + o)/2.0f) + 0.5f).b; )
53  C(1, res.a = texture(input_img[0], ((p )/2.0f) + 0.5f).a; )
54  C(1, imageStore(output_img[0], pos, res); )
55  C(0, } )
56  C(0, )
/* distort_chroma(): planar path — scales the centred coordinate by
 * d/(d*dist), i.e. effectively 1/dist, radially shifting the chroma plane.
 * NOTE(review): the "d = sqrt(...)" factor cancels out; presumably kept for
 * clarity or a historical non-linear variant — confirm intent upstream. */
57  C(0, void distort_chroma(int idx, ivec2 size, ivec2 pos) )
58  C(0, { )
59  C(1, vec2 p = ((vec2(pos)/vec2(size)) - 0.5f)*2.0f; )
60  C(1, float d = sqrt(p.x*p.x + p.y*p.y); )
61  C(1, p *= d / (d* dist); )
62  C(1, vec4 res = texture(input_img[idx], (p/2.0f) + 0.5f); )
63  C(1, imageStore(output_img[idx], pos, res); )
64  C(0, } )
65 };
66 
/* init_filter(): one-time setup — sampler, pipeline, descriptor sets,
 * generated compute shader, and execution context; also converts the user
 * options into the push-constant representation.
 * NOTE(review): this doxygen extraction elided the signature and several
 * declarations (s, planes, local size, etc.); the symbol index gives the
 * signature as "static av_cold int init_filter(AVFilterContext *ctx,
 * AVFrame *in)" — confirm against the real source. */
68 {
69  int err;
71 
72  /* Create a sampler */
/* Linear filtering with normalized coordinates (unnorm_coords = 0);
 * the sampler is owned by the Vulkan filter context and freed in uninit. */
73  VkSampler *sampler = ff_vk_init_sampler(ctx, 0, VK_FILTER_LINEAR);
74  if (!sampler)
75  return AVERROR_EXTERNAL;
76 
/* Use all available compute queues (graphics=0, compute=1, transfer=0). */
78  s->vkctx.queue_count = GET_QUEUE_COUNT(s->vkctx.hwctx, 0, 1, 0);
80 
81  s->pl = ff_vk_create_pipeline(ctx);
82  if (!s->pl)
83  return AVERROR(ENOMEM);
84 
85  /* Normalize options */
/* Map the [-10,10] option range (percent) onto a multiplicative distortion
 * factor centred on 1.0, as expected by the shader's "dist" uniform. */
86  s->opts.dist[0] = (s->opts.dist[0] / 100.0f) + 1.0f;
87  s->opts.dist[1] = (s->opts.dist[1] / 100.0f) + 1.0f;
88 
89  { /* Create the shader */
/* Set 0: binding 0 = sampled input planes, binding 1 = writeonly storage
 * output planes. The .updater pointers reference the arrays kept in the
 * filter context so process_frames() can refresh them per frame. */
91  VulkanDescriptorSetBinding desc_i[2] = {
92  {
93  .name = "input_img",
94  .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
95  .dimensions = 2,
96  .elems = planes,
97  .stages = VK_SHADER_STAGE_COMPUTE_BIT,
98  .updater = s->input_images,
99  .samplers = DUP_SAMPLER_ARRAY4(*sampler),
100  },
101  {
102  .name = "output_img",
103  .type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
104  .mem_layout = ff_vk_shader_rep_fmt(s->vkctx.output_format),
105  .mem_quali = "writeonly",
106  .dimensions = 2,
107  .elems = planes,
108  .stages = VK_SHADER_STAGE_COMPUTE_BIT,
109  .updater = s->output_images,
110  },
111  };
112 
113  SPIRVShader *shd = ff_vk_init_shader(ctx, s->pl, "chromaber_compute",
114  VK_SHADER_STAGE_COMPUTE_BIT);
115  if (!shd)
116  return AVERROR(ENOMEM);
117 
/* NOTE(review): extraction elided source line 118 — presumably
 * ff_vk_set_compute_shader_sizes(ctx, shd, CGROUPS); confirm. */
119 
/* The GLSL push-constant block must mirror struct s->opts exactly. */
120  GLSLC(0, layout(push_constant, std430) uniform pushConstants { );
121  GLSLC(1, vec2 dist; );
122  GLSLC(0, }; );
123  GLSLC(0, );
124 
125  ff_vk_add_push_constant(ctx, s->pl, 0, sizeof(s->opts),
126  VK_SHADER_STAGE_COMPUTE_BIT);
127 
128  RET(ff_vk_add_descriptor_set(ctx, s->pl, shd, desc_i, 2, 0)); /* set 0 */
129 
/* NOTE(review): elided source line 130 presumably emitted the kernel
 * helpers, e.g. GLSLD(distort_chroma_kernel); confirm. */
131  GLSLC(0, void main() );
132  GLSLC(0, { );
133  GLSLC(1, ivec2 pos = ivec2(gl_GlobalInvocationID.xy); );
/* Single plane => packed RGB(A): distort everything in one call. */
134  if (planes == 1) {
135  GLSLC(1, distort_rgb(imageSize(output_img[0]), pos); );
136  } else {
/* Planar: plane 0 (luma) is copied through untouched; each chroma plane
 * is distorted where pos is in range, else copied, since chroma planes
 * may be subsampled relative to the dispatch dimensions. */
137  GLSLC(1, ivec2 size = imageSize(output_img[0]); );
138  GLSLC(1, vec2 npos = vec2(pos)/vec2(size); );
139  GLSLC(1, vec4 res = texture(input_img[0], npos); );
140  GLSLC(1, imageStore(output_img[0], pos, res); );
141  for (int i = 1; i < planes; i++) {
142  GLSLC(0, );
143  GLSLF(1, size = imageSize(output_img[%i]); ,i);
144  GLSLC(1, if (IS_WITHIN(pos, size)) { );
145  GLSLF(2, distort_chroma(%i, size, pos); ,i);
146  GLSLC(1, } else { );
147  GLSLC(2, npos = vec2(pos)/vec2(size); );
148  GLSLF(2, res = texture(input_img[%i], npos); ,i);
149  GLSLF(2, imageStore(output_img[%i], pos, res); ,i);
150  GLSLC(1, } );
151  }
152  }
153  GLSLC(0, } );
154 
155  RET(ff_vk_compile_shader(ctx, shd, "main"));
156  }
157 
/* NOTE(review): elided lines 158-159 presumably called
 * ff_vk_init_pipeline_layout() and ff_vk_init_compute_pipeline() (both
 * appear in the symbol index); confirm. */
160 
161  /* Execution context */
162  RET(ff_vk_create_exec_ctx(ctx, &s->exec));
163 
164  s->initialized = 1;
165 
166  return 0;
167 
168 fail:
169  return err;
170 }
171 
/* process_frames(): records and submits one compute dispatch that reads
 * in_f and writes out_f. Both frames must be Vulkan hw frames (data[0] is
 * an AVVkFrame). Returns 0 on success, a negative AVERROR on failure; on
 * failure all queued frame dependencies are discarded. */
172 static int process_frames(AVFilterContext *avctx, AVFrame *out_f, AVFrame *in_f)
173 {
174  int err = 0;
175  VkCommandBuffer cmd_buf;
/* NOTE(review): extraction elided lines 176 and 179 — presumably the
 * "s = avctx->priv" context pointer and the per-frame "planes" count
 * (av_pix_fmt_count_planes appears in the symbol index); confirm. */
177  AVVkFrame *in = (AVVkFrame *)in_f->data[0];
178  AVVkFrame *out = (AVVkFrame *)out_f->data[0];
180 
181  /* Update descriptors and init the exec context */
/* NOTE(review): the int return of ff_vk_start_exec_recording() is ignored
 * here; if recording fails, cmd_buf below may be invalid — consider
 * wrapping in RET(). */
182  ff_vk_start_exec_recording(avctx, s->exec);
183  cmd_buf = ff_vk_get_exec_buf(avctx, s->exec);
184 
/* Create per-plane image views; they are tied to the exec context and
 * recycled with it. */
185  for (int i = 0; i < planes; i++) {
186  RET(ff_vk_create_imageview(avctx, s->exec, &s->input_images[i].imageView,
187  in->img[i],
/* NOTE(review): elided lines 188-189 carried the remaining arguments
 * (av_vkfmt_from_pixfmt(...) and ff_comp_identity_map); confirm. */
190 
191  RET(ff_vk_create_imageview(avctx, s->exec, &s->output_images[i].imageView,
192  out->img[i],
/* NOTE(review): elided lines 193-194 — same trailing arguments for the
 * output view; confirm. */
195 
196  s->input_images[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
197  s->output_images[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
198  }
199 
200  ff_vk_update_descriptor_set(avctx, s->pl, 0);
201 
/* Transition each plane: input -> SHADER_READ_ONLY_OPTIMAL for sampling,
 * output -> GENERAL for storage-image writes. */
202  for (int i = 0; i < planes; i++) {
203  VkImageMemoryBarrier bar[2] = {
204  {
205  .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
206  .srcAccessMask = 0,
207  .dstAccessMask = VK_ACCESS_SHADER_READ_BIT,
208  .oldLayout = in->layout[i],
209  .newLayout = s->input_images[i].imageLayout,
210  .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
211  .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
212  .image = in->img[i],
213  .subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
214  .subresourceRange.levelCount = 1,
215  .subresourceRange.layerCount = 1,
216  },
217  {
218  .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
219  .srcAccessMask = 0,
220  .dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT,
221  .oldLayout = out->layout[i],
222  .newLayout = s->output_images[i].imageLayout,
223  .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
224  .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
225  .image = out->img[i],
226  .subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
227  .subresourceRange.levelCount = 1,
228  .subresourceRange.layerCount = 1,
229  },
230  };
231 
232  vkCmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
233  VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0,
234  0, NULL, 0, NULL, FF_ARRAY_ELEMS(bar), bar);
235 
/* Record the new layouts/access back into the AVVkFrame so later users
 * (and the hwcontext) see the images' true state. */
236  in->layout[i] = bar[0].newLayout;
237  in->access[i] = bar[0].dstAccessMask;
238 
239  out->layout[i] = bar[1].newLayout;
240  out->access[i] = bar[1].dstAccessMask;
241  }
242 
243  ff_vk_bind_pipeline_exec(avctx, s->exec, s->pl);
244 
245  ff_vk_update_push_exec(avctx, s->exec, VK_SHADER_STAGE_COMPUTE_BIT,
246  0, sizeof(s->opts), &s->opts);
247 
/* One workgroup covers CGROUPS[0] x CGROUPS[1] pixels of plane 0.
 * NOTE(review): elided line 249 carried the X dispatch dimension,
 * presumably FFALIGN(output_width, CGROUPS[0])/CGROUPS[0]; confirm. */
248  vkCmdDispatch(cmd_buf,
250  FFALIGN(s->vkctx.output_height, CGROUPS[1])/CGROUPS[1], 1);
251 
/* Keep both frames alive until the GPU work completes. */
252  ff_vk_add_exec_dep(avctx, s->exec, in_f, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
253  ff_vk_add_exec_dep(avctx, s->exec, out_f, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
254 
255  err = ff_vk_submit_exec_queue(avctx, s->exec);
256  if (err)
257  return err;
258 
259  return err;
260 
261 fail:
262  ff_vk_discard_exec_deps(avctx, s->exec);
263  return err;
264 }
265 
/* chromaber_vulkan_filter_frame(): per-frame entry point. Allocates the
 * output hw frame, lazily runs init_filter() on the first frame, processes,
 * copies frame props, frees the input and forwards the result downstream.
 * On any failure both frames are freed and the error is returned.
 * NOTE(review): signature elided by the extraction; the symbol index gives
 * "static int chromaber_vulkan_filter_frame(AVFilterLink *link, AVFrame *in)"
 * — confirm. Likewise elided line 270 presumably declared
 * "ChromaticAberrationVulkanContext *s = ctx->priv;". */
267 {
268  int err;
269  AVFilterContext *ctx = link->dst;
271  AVFilterLink *outlink = ctx->outputs[0];
272 
273  AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
274  if (!out) {
275  err = AVERROR(ENOMEM);
276  goto fail;
277  }
278 
/* Lazy init: shader/pipeline construction needs the negotiated formats,
 * which are only certain once the first frame arrives. */
279  if (!s->initialized)
280  RET(init_filter(ctx, in));
281 
282  RET(process_frames(ctx, out, in));
283 
284  err = av_frame_copy_props(out, in);
285  if (err < 0)
286  goto fail;
287 
288  av_frame_free(&in);
289 
290  return ff_filter_frame(outlink, out);
291 
292 fail:
293  av_frame_free(&in);
294  av_frame_free(&out);
295  return err;
296 }
297 
/* chromaber_vulkan_uninit(): releases all Vulkan filter resources via the
 * shared helper and marks the filter uninitialized so a re-init is possible.
 * NOTE(review): signature and the "s" declaration elided by the extraction;
 * index gives "static void chromaber_vulkan_uninit(AVFilterContext *avctx)"
 * — confirm. */
299 {
301 
302  ff_vk_filter_uninit(avctx);
303 
304  s->initialized = 0;
305 }
306 
/* AVOption table: dist_x/dist_y in [-10,10] percent, normalized in
 * init_filter() to a factor around 1.0.
 * NOTE(review): the array declaration line ("static const AVOption
 * chromaber_vulkan_options[] = {", source line 309) was elided by the
 * extraction; confirm. */
307 #define OFFSET(x) offsetof(ChromaticAberrationVulkanContext, x)
308 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
310  { "dist_x", "Set horizontal distortion amount", OFFSET(opts.dist[0]), AV_OPT_TYPE_FLOAT, {.dbl = 0.0f}, -10.0f, 10.0f, .flags = FLAGS },
311  { "dist_y", "Set vertical distortion amount", OFFSET(opts.dist[1]), AV_OPT_TYPE_FLOAT, {.dbl = 0.0f}, -10.0f, 10.0f, .flags = FLAGS },
312  { NULL },
313 };
314 
315 AVFILTER_DEFINE_CLASS(chromaber_vulkan);
316 
/* Input pads: a single video pad; config_props hooks the generic Vulkan
 * input configuration.
 * NOTE(review): the array declaration line ("static const AVFilterPad
 * chromaber_vulkan_inputs[] = {", source line 317) was elided; confirm. */
318  {
319  .name = "default",
320  .type = AVMEDIA_TYPE_VIDEO,
321  .filter_frame = &chromaber_vulkan_filter_frame,
322  .config_props = &ff_vk_filter_config_input,
323  },
324  { NULL }
325 };
326 
/* Output pads: a single video pad using the generic Vulkan output config.
 * NOTE(review): the array declaration line ("static const AVFilterPad
 * chromaber_vulkan_outputs[] = {", source line 327) was elided; confirm. */
328  {
329  .name = "default",
330  .type = AVMEDIA_TYPE_VIDEO,
331  .config_props = &ff_vk_filter_config_output,
332  },
333  { NULL }
334 };
335 
/* Filter definition.
 * NOTE(review): the extraction elided the declaration line
 * ("AVFilter ff_vf_chromaber_vulkan = {", source line 336) and fields at
 * source lines 340-342 — presumably .init/.uninit/.query_formats wiring
 * ff_vk_filter_init, chromaber_vulkan_uninit and
 * ff_vk_filter_query_formats (all present in the symbol index); confirm. */
337  .name = "chromaber_vulkan",
338  .description = NULL_IF_CONFIG_SMALL("Offset chroma of input video (chromatic aberration)"),
339  .priv_size = sizeof(ChromaticAberrationVulkanContext),
343  .inputs = chromaber_vulkan_inputs,
344  .outputs = chromaber_vulkan_outputs,
345  .priv_class = &chromaber_vulkan_class,
346  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
347 };
#define NULL
Definition: coverity.c:32
int ff_vk_add_exec_dep(AVFilterContext *avctx, FFVkExecContext *e, AVFrame *frame, VkPipelineStageFlagBits in_wait_dst_flag)
Adds a frame as a queue dependency.
Definition: vulkan.c:464
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: internal.h:339
int ff_vk_init_pipeline_layout(AVFilterContext *avctx, VulkanPipeline *pl)
Initializes the pipeline layout after all shaders and descriptor sets have been finished.
Definition: vulkan.c:1180
int ff_vk_add_descriptor_set(AVFilterContext *avctx, VulkanPipeline *pl, SPIRVShader *shd, VulkanDescriptorSetBinding *desc, int num, int only_print_to_shader)
Adds a descriptor set to the shader and registers them in the pipeline.
Definition: vulkan.c:1020
This structure describes decoded (raw) audio or video data.
Definition: frame.h:308
AVFilter ff_vf_chromaber_vulkan
void ff_vk_filter_uninit(AVFilterContext *avctx)
Definition: vulkan.c:1415
AVOption.
Definition: opt.h:248
const char * ff_vk_shader_rep_fmt(enum AVPixelFormat pixfmt)
Gets the glsl format string for a pixel format.
Definition: vulkan.c:817
static av_cold int init_filter(AVFilterContext *ctx, AVFrame *in)
const char * name
Definition: vulkan.h:74
int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2613
const char * g
Definition: vf_curves.c:115
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:35
int ff_vk_init_compute_pipeline(AVFilterContext *avctx, VulkanPipeline *pl)
Initializes a compute pipeline.
Definition: vulkan.c:1281
struct ChromaticAberrationVulkanContext::@220 opts
int ff_vk_create_imageview(AVFilterContext *avctx, FFVkExecContext *e, VkImageView *v, VkImage img, VkFormat fmt, const VkComponentMapping map)
Create an imageview.
Definition: vulkan.c:836
int ff_vk_filter_config_output(AVFilterLink *outlink)
Definition: vulkan.c:705
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:36
int ff_vk_start_exec_recording(AVFilterContext *avctx, FFVkExecContext *e)
Begin recording to the command buffer.
Definition: vulkan.c:417
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
const VkFormat * av_vkfmt_from_pixfmt(enum AVPixelFormat p)
Returns the format of each image up to the number of planes for a given sw_format.
VkImage img[AV_NUM_DATA_POINTERS]
Vulkan images to which the memory is bound to.
const char * name
Pad name.
Definition: internal.h:60
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1091
int ff_vk_add_push_constant(AVFilterContext *avctx, VulkanPipeline *pl, int offset, int size, VkShaderStageFlagBits stage)
Define a push constant for a given stage into a pipeline.
Definition: vulkan.c:318
#define av_cold
Definition: attributes.h:88
static av_cold int uninit(AVCodecContext *avctx)
Definition: crystalhd.c:279
AVOptions.
#define f(width, name)
Definition: cbs_vp9.c:255
void ff_vk_set_compute_shader_sizes(AVFilterContext *avctx, SPIRVShader *shd, int local_size[3])
Writes the workgroup size for a shader.
Definition: vulkan.c:909
int queue_family_comp_index
Queue family index for compute ops, and the amount of queues enabled.
int ff_vk_filter_init(AVFilterContext *avctx)
Definition: vulkan.c:756
ptrdiff_t size
Definition: opengl_enc.c:100
#define FFALIGN(x, a)
Definition: macros.h:48
A filter pad used for either input or output.
Definition: internal.h:54
static const struct @323 planes[]
int ff_vk_filter_query_formats(AVFilterContext *avctx)
General lavfi IO functions.
Definition: vulkan.c:592
enum AVPixelFormat input_format
Definition: vulkan.h:176
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:153
const char * r
Definition: vf_curves.c:114
void * priv
private data for use by the filter
Definition: avfilter.h:354
unsigned int pos
Definition: spdifenc.c:410
#define fail()
Definition: checkasm.h:123
static int chromaber_vulkan_filter_frame(AVFilterLink *link, AVFrame *in)
#define b
Definition: input.c:41
VkSampler * ff_vk_init_sampler(AVFilterContext *avctx, int unnorm_coords, VkFilter filt)
Create a Vulkan sampler, will be auto-freed in ff_vk_filter_uninit()
Definition: vulkan.c:769
static const AVFilterPad chromaber_vulkan_outputs[]
int ff_vk_create_exec_ctx(AVFilterContext *avctx, FFVkExecContext **ctx)
Init an execution context for command recording and queue submission.
Definition: vulkan.c:339
VkAccessFlagBits access[AV_NUM_DATA_POINTERS]
Updated after every barrier.
int ff_vk_submit_exec_queue(AVFilterContext *avctx, FFVkExecContext *e)
Submits a command buffer to the queue for execution.
Definition: vulkan.c:522
AVFormatContext * ctx
Definition: movenc.c:48
#define GLSLF(N, S,...)
Definition: vulkan.h:40
#define s(width, name)
Definition: cbs_vp9.c:257
int main(int argc, char *argv[])
static const AVFilterPad chromaber_vulkan_inputs[]
#define GLSLC(N, S)
Definition: vulkan.h:38
void ff_vk_bind_pipeline_exec(AVFilterContext *avctx, FFVkExecContext *e, VulkanPipeline *pl)
Add a command to bind the completed pipeline and its descriptor sets.
Definition: vulkan.c:1316
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
#define FF_ARRAY_ELEMS(a)
s EdgeDetect Foobar g libavfilter vf_edgedetect c libavfilter vf_foobar c edit libavfilter and add an entry for foobar following the pattern of the other filters edit libavfilter allfilters and add an entry for foobar following the pattern of the other filters configure make j< whatever > ffmpeg ffmpeg i you should get a foobar png with Lena edge detected That s your new playground is ready Some little details about what s going which in turn will define variables for the build system and the C
const VkComponentMapping ff_comp_identity_map
Definition: vulkan.c:44
#define OFFSET(x)
int ff_vk_compile_shader(AVFilterContext *avctx, SPIRVShader *shd, const char *entrypoint)
Compiles the shader, entrypoint must be set to "main".
Definition: vulkan.c:942
SPIRVShader * ff_vk_init_shader(AVFilterContext *avctx, VulkanPipeline *pl, const char *name, VkShaderStageFlags stage)
Inits a shader for a specific pipeline.
Definition: vulkan.c:888
AVVulkanDeviceContext * hwctx
Definition: vulkan.h:165
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi-0x80)*(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi-0x80)*(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(const int16_t *) pi >> 8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t,*(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t,*(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(const int32_t *) pi >> 24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t,*(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t,*(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31))))#define SET_CONV_FUNC_GROUP(ofmt, ifmt) static void set_generic_function(AudioConvert *ac){}void ff_audio_convert_free(AudioConvert **ac){if(!*ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);}AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, int sample_rate, 
int apply_map){AudioConvert *ac;int in_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) return NULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method!=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt) > 2){ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc){av_free(ac);return NULL;}return ac;}in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar){ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar?ac->channels:1;}else if(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;else ac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);return ac;}int ff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in){int use_generic=1;int len=in->nb_samples;int p;if(ac->dc){av_log(ac->avr, AV_LOG_TRACE,"%d samples - audio_convert: %s to %s (dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));return ff_convert_dither(ac-> in
Filter definition.
Definition: avfilter.h:145
VulkanPipeline * ff_vk_create_pipeline(AVFilterContext *avctx)
Inits a pipeline.
Definition: vulkan.c:1276
const char * name
Filter name.
Definition: avfilter.h:149
#define CGROUPS
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
AVFILTER_DEFINE_CLASS(chromaber_vulkan)
VkCommandBuffer ff_vk_get_exec_buf(AVFilterContext *avctx, FFVkExecContext *e)
Gets the command buffer to use for this submission from the exe context.
Definition: vulkan.c:458
#define GET_QUEUE_COUNT(hwctx, graph, comp, tx)
Definition: vulkan.h:53
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:351
void ff_vk_discard_exec_deps(AVFilterContext *avctx, FFVkExecContext *e)
Discards all queue dependencies.
Definition: vulkan.c:400
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:322
VkDescriptorImageInfo output_images[3]
static int query_formats(AVFilterContext *ctx)
Definition: aeval.c:244
#define FLAGS
enum AVPixelFormat output_format
Definition: vulkan.h:175
VkDescriptorImageInfo input_images[3]
void ff_vk_update_push_exec(AVFilterContext *avctx, FFVkExecContext *e, VkShaderStageFlagBits stage, int offset, size_t size, void *src)
Updates push constants.
Definition: vulkan.c:1171
static const AVOption chromaber_vulkan_options[]
#define GLSLD(D)
Definition: vulkan.h:41
static const char distort_chroma_kernel[]
static void chromaber_vulkan_uninit(AVFilterContext *avctx)
int ff_vk_filter_config_input(AVFilterLink *inlink)
Definition: vulkan.c:635
An instance of a filter.
Definition: avfilter.h:339
VkImageLayout layout[AV_NUM_DATA_POINTERS]
#define RET(x)
Definition: vulkan.h:46
FILE * out
Definition: movenc.c:54
uint32_t av_get_random_seed(void)
Get a seed to use in conjunction with random functions.
Definition: random_seed.c:120
#define DUP_SAMPLER_ARRAY4(x)
Definition: vulkan.h:64
internal API functions
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:57
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel layout
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:671
int i
Definition: input.c:407
static int process_frames(AVFilterContext *avctx, AVFrame *out_f, AVFrame *in_f)
void ff_vk_update_descriptor_set(AVFilterContext *avctx, VulkanPipeline *pl, int set_id)
Updates a descriptor set via the updaters defined.
Definition: vulkan.c:1160