FFmpeg
vf_pad_opencl.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/colorspace.h"
#include "libavutil/eval.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "avfilter.h"
#include "drawutils.h"
#include "formats.h"
#include "internal.h"
#include "opencl.h"
#include "opencl_source.h"
#include "video.h"

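/* Names of the variables that can be used in the w/h/x/y option expressions. */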
static const char *const var_names[] = {
    "in_w",  "iw",
    "in_h",  "ih",
    "out_w", "ow",
    "out_h", "oh",
    "x",
    "y",
    "a",
    "sar",
    "dar",
    NULL
};

enum var_name {
    VAR_IN_W,  VAR_IW,
    VAR_IN_H,  VAR_IH,
    VAR_OUT_W, VAR_OW,
    VAR_OUT_H, VAR_OH,
    VAR_X,
    VAR_Y,
    VAR_A,
    VAR_SAR,
    VAR_DAR,
    VARS_NB
};

typedef struct PadOpenCLContext {
    OpenCLFilterContext ocf;
    int initialized;
    int is_rgb;
    int is_packed;
    int hsub, vsub;

    char *w_expr;
    char *h_expr;
    char *x_expr;
    char *y_expr;
    AVRational aspect;

    cl_command_queue command_queue;
    cl_kernel kernel_pad;

    int w, h;
    int x, y;
    uint8_t pad_rgba[4];
    uint8_t pad_color[4];
    cl_float4 pad_color_float;
    cl_int2 pad_pos;
} PadOpenCLContext;

static int pad_opencl_init(AVFilterContext *avctx, AVFrame *input_frame)
{
    PadOpenCLContext *ctx = avctx->priv;
    AVHWFramesContext *input_frames_ctx = (AVHWFramesContext *)input_frame->hw_frames_ctx->data;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(input_frames_ctx->sw_format);
    uint8_t rgba_map[4];
    cl_int cle;
    int err;

    ff_fill_rgba_map(rgba_map, input_frames_ctx->sw_format);
    ctx->is_rgb = !!(desc->flags & AV_PIX_FMT_FLAG_RGB);
    ctx->is_packed = !(desc->flags & AV_PIX_FMT_FLAG_PLANAR);
    ctx->hsub = desc->log2_chroma_w;
    ctx->vsub = desc->log2_chroma_h;

    err = ff_opencl_filter_load_program(avctx, &ff_opencl_source_pad, 1);
    if (err < 0)
        goto fail;

    ctx->command_queue = clCreateCommandQueue(
        ctx->ocf.hwctx->context,
        ctx->ocf.hwctx->device_id,
        0,
        &cle
    );
    CL_FAIL_ON_ERROR(AVERROR(EIO), "Failed to create OpenCL command queue %d.\n", cle);

    /* Express the RGBA pad color in the frame's native layout: reorder the
     * components for RGB formats, or convert to BT.709 YUV for YUV formats. */
    if (ctx->is_rgb) {
        ctx->pad_color[rgba_map[0]] = ctx->pad_rgba[0];
        ctx->pad_color[rgba_map[1]] = ctx->pad_rgba[1];
        ctx->pad_color[rgba_map[2]] = ctx->pad_rgba[2];
        ctx->pad_color[rgba_map[3]] = ctx->pad_rgba[3];
    } else {
        ctx->pad_color[0] = RGB_TO_Y_BT709(ctx->pad_rgba[0], ctx->pad_rgba[1], ctx->pad_rgba[2]);
        ctx->pad_color[1] = RGB_TO_U_BT709(ctx->pad_rgba[0], ctx->pad_rgba[1], ctx->pad_rgba[2], 0);
        ctx->pad_color[2] = RGB_TO_V_BT709(ctx->pad_rgba[0], ctx->pad_rgba[1], ctx->pad_rgba[2], 0);
        ctx->pad_color[3] = ctx->pad_rgba[3];
    }

    ctx->kernel_pad = clCreateKernel(ctx->ocf.program, "pad", &cle);
    CL_FAIL_ON_ERROR(AVERROR(EIO), "Failed to create pad kernel: %d.\n", cle);

    /* Pre-compute the pad color as normalized floats in [0, 1] for the kernel. */
    for (int i = 0; i < 4; ++i) {
        ctx->pad_color_float.s[i] = (float)ctx->pad_color[i] / 255.0;
    }

    ctx->pad_pos.s[0] = ctx->x;
    ctx->pad_pos.s[1] = ctx->y;

    ctx->initialized = 1;
    return 0;

fail:
    if (ctx->command_queue)
        clReleaseCommandQueue(ctx->command_queue);
    if (ctx->kernel_pad)
        clReleaseKernel(ctx->kernel_pad);
    return err;
}

static int filter_frame(AVFilterLink *link, AVFrame *input_frame)
{
    AVFilterContext *avctx = link->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    PadOpenCLContext *pad_ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    int err;
    cl_int cle;
    size_t global_work[2];
    cl_mem src, dst;

    if (!input_frame->hw_frames_ctx)
        return AVERROR(EINVAL);

    /* The OpenCL state is set up lazily on the first frame, once the software
     * format of the incoming hardware frames is known. */
    if (!pad_ctx->initialized) {
        err = pad_opencl_init(avctx, input_frame);
        if (err < 0)
            goto fail;
    }

    output_frame = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!output_frame) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    for (int p = 0; p < FF_ARRAY_ELEMS(output_frame->data); p++) {
        cl_float4 pad_color_float;
        cl_int2 pad_pos;

        if (pad_ctx->is_packed) {
            pad_color_float = pad_ctx->pad_color_float;
        } else {
            pad_color_float.s[0] = pad_ctx->pad_color_float.s[p];
            pad_color_float.s[1] = pad_ctx->pad_color_float.s[2];
        }

        /* Chroma planes are subsampled, so the pad offset is scaled down by
         * the chroma shift for planes 1 and 2. */
        if (p > 0 && p < 3) {
            pad_pos.s[0] = pad_ctx->pad_pos.s[0] >> pad_ctx->hsub;
            pad_pos.s[1] = pad_ctx->pad_pos.s[1] >> pad_ctx->vsub;
        } else {
            pad_pos.s[0] = pad_ctx->pad_pos.s[0];
            pad_pos.s[1] = pad_ctx->pad_pos.s[1];
        }

        src = (cl_mem)input_frame->data[p];
        dst = (cl_mem)output_frame->data[p];

        if (!dst)
            break;

        CL_SET_KERNEL_ARG(pad_ctx->kernel_pad, 0, cl_mem,    &src);
        CL_SET_KERNEL_ARG(pad_ctx->kernel_pad, 1, cl_mem,    &dst);
        CL_SET_KERNEL_ARG(pad_ctx->kernel_pad, 2, cl_float4, &pad_color_float);
        CL_SET_KERNEL_ARG(pad_ctx->kernel_pad, 3, cl_int2,   &pad_pos);

        err = ff_opencl_filter_work_size_from_image(avctx, global_work, output_frame, p, 16);
        if (err < 0)
            goto fail;

        cle = clEnqueueNDRangeKernel(pad_ctx->command_queue, pad_ctx->kernel_pad, 2, NULL,
                                     global_work, NULL, 0, NULL, NULL);
        CL_FAIL_ON_ERROR(AVERROR(EIO), "Failed to enqueue pad kernel: %d.\n", cle);
    }

    // Wait for all queued plane kernels to finish
    cle = clFinish(pad_ctx->command_queue);
    CL_FAIL_ON_ERROR(AVERROR(EIO), "Failed to finish command queue: %d.\n", cle);

    err = av_frame_copy_props(output_frame, input_frame);
    if (err < 0)
        goto fail;

    av_frame_free(&input_frame);

    return ff_filter_frame(outlink, output_frame);

fail:
    clFinish(pad_ctx->command_queue);
    av_frame_free(&input_frame);
    av_frame_free(&output_frame);
    return err;
}
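
/*
 * Illustrative sketch only (not part of this file): an OpenCL kernel with the
 * argument order that the CL_SET_KERNEL_ARG calls above assume -- source image,
 * destination image, pad color, pad position.  The kernel actually used is
 * supplied through ff_opencl_source_pad and may differ in detail.
 *
 *   __kernel void pad(__read_only  image2d_t src,
 *                     __write_only image2d_t dst,
 *                     float4 pad_color,
 *                     int2 pad_pos)
 *   {
 *       const sampler_t sampler = CLK_NORMALIZED_COORDS_FALSE |
 *                                 CLK_ADDRESS_CLAMP_TO_EDGE   |
 *                                 CLK_FILTER_NEAREST;
 *       int2 dst_pos = (int2)(get_global_id(0), get_global_id(1));
 *       int2 src_pos = dst_pos - pad_pos;
 *       float4 v = pad_color;
 *
 *       if (src_pos.x >= 0 && src_pos.y >= 0 &&
 *           src_pos.x < get_image_width(src) &&
 *           src_pos.y < get_image_height(src))
 *           v = read_imagef(src, sampler, src_pos);
 *       write_imagef(dst, dst_pos, v);
 *   }
 */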

static av_cold void pad_opencl_uninit(AVFilterContext *avctx)
{
    PadOpenCLContext *ctx = avctx->priv;
    cl_int cle;

    if (ctx->kernel_pad) {
        cle = clReleaseKernel(ctx->kernel_pad);
        if (cle != CL_SUCCESS)
            av_log(avctx, AV_LOG_ERROR, "Failed to release "
                   "kernel: %d.\n", cle);
    }

    if (ctx->command_queue) {
        cle = clReleaseCommandQueue(ctx->command_queue);
        if (cle != CL_SUCCESS)
            av_log(avctx, AV_LOG_ERROR, "Failed to release "
                   "command queue: %d.\n", cle);
    }

    ff_opencl_filter_uninit(avctx);
}

static int pad_opencl_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    AVFilterLink *inlink = avctx->inputs[0];
    PadOpenCLContext *ctx = avctx->priv;
    AVRational adjusted_aspect = ctx->aspect;
    double var_values[VARS_NB], res;
    int err, ret;
    char *expr;

    var_values[VAR_IN_W]  = var_values[VAR_IW] = inlink->w;
    var_values[VAR_IN_H]  = var_values[VAR_IH] = inlink->h;
    var_values[VAR_OUT_W] = var_values[VAR_OW] = NAN;
    var_values[VAR_OUT_H] = var_values[VAR_OH] = NAN;
    var_values[VAR_A]     = (double) inlink->w / inlink->h;
    var_values[VAR_SAR]   = inlink->sample_aspect_ratio.num ?
        (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
    var_values[VAR_DAR]   = var_values[VAR_A] * var_values[VAR_SAR];

    av_expr_parse_and_eval(&res, (expr = ctx->w_expr),
                           var_names, var_values,
                           NULL, NULL, NULL, NULL, NULL, 0, ctx);
    ctx->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = res;
    if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->h_expr),
                                      var_names, var_values,
                                      NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
        return ret;
    ctx->h = var_values[VAR_OUT_H] = var_values[VAR_OH] = res;
    if (!ctx->h)
        var_values[VAR_OUT_H] = var_values[VAR_OH] = ctx->h = inlink->h;

    /* evaluate the width again, as it may depend on the evaluated output height */
    if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->w_expr),
                                      var_names, var_values,
                                      NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
        return ret;
    ctx->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = res;
    if (!ctx->w)
        var_values[VAR_OUT_W] = var_values[VAR_OW] = ctx->w = inlink->w;

    /* if an aspect ratio was requested, enlarge the width or the height until
     * the padded frame reaches it (adjusted for the input SAR) */
    if (adjusted_aspect.num && adjusted_aspect.den) {
        adjusted_aspect = av_div_q(adjusted_aspect, inlink->sample_aspect_ratio);
        if (ctx->h < av_rescale(ctx->w, adjusted_aspect.den, adjusted_aspect.num)) {
            ctx->h = var_values[VAR_OUT_H] = var_values[VAR_OH] =
                av_rescale(ctx->w, adjusted_aspect.den, adjusted_aspect.num);
        } else {
            ctx->w = var_values[VAR_OUT_W] = var_values[VAR_OW] =
                av_rescale(ctx->h, adjusted_aspect.num, adjusted_aspect.den);
        }
    }
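
    /*
     * Worked example with hypothetical numbers: a 1440x1080 input with square
     * pixels and aspect=16/9 keeps h = 1080 and grows the width to
     * av_rescale(1080, 16, 9) = 1920, i.e. the frame is padded to 1920x1080.
     */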

    /* evaluate x and y */
    av_expr_parse_and_eval(&res, (expr = ctx->x_expr),
                           var_names, var_values,
                           NULL, NULL, NULL, NULL, NULL, 0, ctx);
    ctx->x = var_values[VAR_X] = res;
    if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->y_expr),
                                      var_names, var_values,
                                      NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
        return ret;
    ctx->y = var_values[VAR_Y] = res;
    /* evaluate x again, as it may depend on the evaluated y value */
    if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->x_expr),
                                      var_names, var_values,
                                      NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
        return ret;
    ctx->x = var_values[VAR_X] = res;

    /* out-of-range offsets fall back to centering the input in the output */
    if (ctx->x < 0 || ctx->x + inlink->w > ctx->w)
        ctx->x = var_values[VAR_X] = (ctx->w - inlink->w) / 2;
    if (ctx->y < 0 || ctx->y + inlink->h > ctx->h)
        ctx->y = var_values[VAR_Y] = (ctx->h - inlink->h) / 2;

    /* sanity check params */
    if (ctx->w < inlink->w || ctx->h < inlink->h) {
        av_log(ctx, AV_LOG_ERROR, "Padded dimensions cannot be smaller than input dimensions.\n");
        return AVERROR(EINVAL);
    }

    /* the OpenCL frames context must be large enough for both the input and
     * the requested pad area */
    if (ctx->w > avctx->inputs[0]->w) {
        ctx->ocf.output_width = ctx->w;
    } else {
        ctx->ocf.output_width = avctx->inputs[0]->w;
    }

    if (ctx->h > avctx->inputs[0]->h) {
        ctx->ocf.output_height = ctx->h;
    } else {
        ctx->ocf.output_height = avctx->inputs[0]->h;
    }

    if (ctx->x + avctx->inputs[0]->w > ctx->ocf.output_width ||
        ctx->y + avctx->inputs[0]->h > ctx->ocf.output_height) {
        return AVERROR(EINVAL);
    }

    err = ff_opencl_filter_config_output(outlink);
    if (err < 0)
        return err;

    return 0;
}
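
/*
 * Example expressions with hypothetical values: w='iw+100', h='ih+100',
 * x='(ow-iw)/2', y='(oh-ih)/2'.  Because ow and oh are already resolved by the
 * time x and y are evaluated, both offsets come out as 50 and the input is
 * centered inside a 50-pixel border on every side.
 */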

static const AVFilterPad pad_opencl_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
        .config_props = &ff_opencl_filter_config_input,
    },
    { NULL }
};

static const AVFilterPad pad_opencl_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &pad_opencl_config_output,
    },
    { NULL }
};

#define OFFSET(x) offsetof(PadOpenCLContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption pad_opencl_options[] = {
    { "width",  "set the pad area width",  OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"}, 0, 0, FLAGS },
    { "w",      "set the pad area width",  OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"}, 0, 0, FLAGS },
    { "height", "set the pad area height", OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"}, 0, 0, FLAGS },
    { "h",      "set the pad area height", OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"}, 0, 0, FLAGS },
    { "x",      "set the x offset for the input image position", OFFSET(x_expr), AV_OPT_TYPE_STRING, {.str = "0"}, 0, INT16_MAX, FLAGS },
    { "y",      "set the y offset for the input image position", OFFSET(y_expr), AV_OPT_TYPE_STRING, {.str = "0"}, 0, INT16_MAX, FLAGS },
    { "color",  "set the color of the padded area border", OFFSET(pad_rgba), AV_OPT_TYPE_COLOR, { .str = "black" }, 0, 0, FLAGS },
    { "aspect", "pad to fit an aspect instead of a resolution", OFFSET(aspect), AV_OPT_TYPE_RATIONAL, {.dbl = 0}, 0, INT16_MAX, FLAGS },
    { NULL }
};
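
/*
 * Illustrative command line (not part of this file), assuming an OpenCL-capable
 * device and an input that can be uploaded as nv12:
 *
 *   ffmpeg -init_hw_device opencl=ocl -filter_hw_device ocl -i input.mp4 \
 *     -vf "format=nv12,hwupload,pad_opencl=w=iw+200:h=ih+200:x=100:y=100:color=blue,hwdownload,format=nv12" \
 *     output.mp4
 */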

AVFILTER_DEFINE_CLASS(pad_opencl);

AVFilter ff_vf_pad_opencl = {
    .name           = "pad_opencl",
    .description    = NULL_IF_CONFIG_SMALL("Pad the input video."),
    .priv_size      = sizeof(PadOpenCLContext),
    .priv_class     = &pad_opencl_class,
    .query_formats  = &ff_opencl_filter_query_formats,
    .init           = &ff_opencl_filter_init,
    .uninit         = &pad_opencl_uninit,
    .inputs         = pad_opencl_inputs,
    .outputs        = pad_opencl_outputs,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE
};