FFmpeg
vf_scale_vaapi.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
18 
19 #include <string.h>
20 
21 #include <va/va.h>
22 #include <va/va_vpp.h>
23 
24 #include "libavutil/avassert.h"
25 #include "libavutil/hwcontext.h"
27 #include "libavutil/mem.h"
28 #include "libavutil/opt.h"
29 #include "libavutil/pixdesc.h"
30 
31 #include "avfilter.h"
32 #include "formats.h"
33 #include "internal.h"
34 #include "scale.h"
35 #include "video.h"
36 
typedef struct ScaleVAAPIContext {
    const AVClass *class;

    AVVAAPIDeviceContext *hwctx;
    AVBufferRef *device_ref;

    int valid_ids;
    VAConfigID  va_config;
    VAContextID va_context;

    AVBufferRef       *input_frames_ref;
    AVHWFramesContext *input_frames;

    AVBufferRef       *output_frames_ref;
    AVHWFramesContext *output_frames;

    char *output_format_string;
    enum AVPixelFormat output_format;

    char *w_expr;       // width expression string
    char *h_expr;       // height expression string

    int output_width;   // computed width
    int output_height;  // computed height
} ScaleVAAPIContext;
static int scale_vaapi_query_formats(AVFilterContext *avctx)
{
    enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_VAAPI, AV_PIX_FMT_NONE,
    };
    int err;

    if ((err = ff_formats_ref(ff_make_format_list(pix_fmts),
                              &avctx->inputs[0]->out_formats)) < 0)
        return err;
    if ((err = ff_formats_ref(ff_make_format_list(pix_fmts),
                              &avctx->outputs[0]->in_formats)) < 0)
        return err;

    return 0;
}
80 
82 {
83  if (ctx->va_context != VA_INVALID_ID) {
84  vaDestroyContext(ctx->hwctx->display, ctx->va_context);
85  ctx->va_context = VA_INVALID_ID;
86  }
87 
88  if (ctx->va_config != VA_INVALID_ID) {
89  vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
90  ctx->va_config = VA_INVALID_ID;
91  }
92 
95  ctx->hwctx = 0;
96 
97  return 0;
98 }
99 
101 {
102  AVFilterContext *avctx = inlink->dst;
103  ScaleVAAPIContext *ctx = avctx->priv;
104 
106 
107  if (!inlink->hw_frames_ctx) {
108  av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
109  "required to associate the processing device.\n");
110  return AVERROR(EINVAL);
111  }
112 
115 
116  return 0;
117 }
118 
120 {
121  AVFilterLink *inlink = outlink->src->inputs[0];
122  AVFilterContext *avctx = outlink->src;
123  ScaleVAAPIContext *ctx = avctx->priv;
124  AVVAAPIHWConfig *hwconfig = NULL;
125  AVHWFramesConstraints *constraints = NULL;
126  AVVAAPIFramesContext *va_frames;
127  VAStatus vas;
128  int err, i;
129 
131 
133  ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;
134 
135  av_assert0(ctx->va_config == VA_INVALID_ID);
136  vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
137  VAEntrypointVideoProc, 0, 0, &ctx->va_config);
138  if (vas != VA_STATUS_SUCCESS) {
139  av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
140  "config: %d (%s).\n", vas, vaErrorStr(vas));
141  err = AVERROR(EIO);
142  goto fail;
143  }
144 
145  hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
146  if (!hwconfig) {
147  err = AVERROR(ENOMEM);
148  goto fail;
149  }
150  hwconfig->config_id = ctx->va_config;
151 
153  hwconfig);
154  if (!constraints) {
155  err = AVERROR(ENOMEM);
156  goto fail;
157  }
158 
    if (ctx->output_format == AV_PIX_FMT_NONE)
        ctx->output_format = ctx->input_frames->sw_format;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->output_format == constraints->valid_sw_formats[i])
                break;
        }
        if (constraints->valid_sw_formats[i] == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Hardware does not support output "
                   "format %s.\n", av_get_pix_fmt_name(ctx->output_format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if ((err = ff_scale_eval_dimensions(ctx,
                                        ctx->w_expr, ctx->h_expr,
                                        inlink, outlink,
                                        &ctx->output_width, &ctx->output_height)) < 0)
        goto fail;
    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(ctx, AV_LOG_ERROR, "Hardware does not support scaling to "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->output_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }
221 
222  av_assert0(ctx->va_context == VA_INVALID_ID);
223  vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
224  ctx->output_width, ctx->output_height,
225  VA_PROGRESSIVE,
226  va_frames->surface_ids, va_frames->nb_surfaces,
227  &ctx->va_context);
228  if (vas != VA_STATUS_SUCCESS) {
229  av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
230  "context: %d (%s).\n", vas, vaErrorStr(vas));
231  return AVERROR(EIO);
232  }
233 
234  outlink->w = ctx->output_width;
235  outlink->h = ctx->output_height;
236 
    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
static int vaapi_proc_colour_standard(enum AVColorSpace av_cs)
{
    switch(av_cs) {
#define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va;
        CS(BT709,     BT709);
        CS(BT470BG,   BT601);
        CS(SMPTE170M, SMPTE170M);
        CS(SMPTE240M, SMPTE240M);
#undef CS
    default:
        return VAProcColorStandardNone;
    }
}
static int scale_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    ScaleVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VAProcPipelineParameterBuffer params;
    VABufferID params_id;
    VARectangle input_region;
    VAStatus vas;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);
    if (ctx->va_context == VA_INVALID_ID)
        return AVERROR(EINVAL);

    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale input.\n",
           input_surface);

    output_frame = ff_get_video_buffer(outlink, ctx->output_width,
                                       ctx->output_height);
    if (!output_frame) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale output.\n",
           output_surface);

    memset(&params, 0, sizeof(params));

    // If there were top/left cropping, it could be taken into
    // account here.
    input_region = (VARectangle) {
        .x      = 0,
        .y      = 0,
        .width  = input_frame->width,
        .height = input_frame->height,
    };
    params.surface = input_surface;
    params.surface_region = &input_region;
    params.surface_color_standard =
        vaapi_proc_colour_standard(input_frame->colorspace);

    params.output_region = NULL;
    params.output_background_color = 0xff000000;
    params.output_color_standard = params.surface_color_standard;

    params.pipeline_flags = 0;
    params.filter_flags = VA_FILTER_SCALING_HQ;
    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, output_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(params), 1, &params, &params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(ctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
           params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to start picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }
    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            // And ignore.
        }
    }
    av_frame_copy_props(output_frame, input_frame);
    av_frame_free(&input_frame);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(output_frame->format),
           output_frame->width, output_frame->height, output_frame->pts);

    return ff_filter_frame(outlink, output_frame);

    // We want to make sure that if vaBeginPicture has been called, we also
    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
    // do something else nasty, but once we're in this failure case there
    // isn't much else we can do.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    av_frame_free(&input_frame);
    av_frame_free(&output_frame);
    return err;
}
static av_cold int scale_vaapi_init(AVFilterContext *avctx)
{
    ScaleVAAPIContext *ctx = avctx->priv;

    ctx->va_config  = VA_INVALID_ID;
    ctx->va_context = VA_INVALID_ID;
    ctx->valid_ids  = 1;

    if (ctx->output_format_string) {
        ctx->output_format = av_get_pix_fmt(ctx->output_format_string);
        if (ctx->output_format == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Invalid output format.\n");
            return AVERROR(EINVAL);
        }
    } else {
        // Use the input format once that is configured.
        ctx->output_format = AV_PIX_FMT_NONE;
    }

    return 0;
}
static av_cold void scale_vaapi_uninit(AVFilterContext *avctx)
{
    ScaleVAAPIContext *ctx = avctx->priv;

    if (ctx->valid_ids)
        scale_vaapi_pipeline_uninit(ctx);

    av_buffer_unref(&ctx->input_frames_ref);
    av_buffer_unref(&ctx->output_frames_ref);
    av_buffer_unref(&ctx->device_ref);
}
#define OFFSET(x) offsetof(ScaleVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption scale_vaapi_options[] = {
    { "w", "Output video width",
      OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"}, .flags = FLAGS },
    { "h", "Output video height",
      OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"}, .flags = FLAGS },
    { "format", "Output video format (software format of hardware frames)",
      OFFSET(output_format_string), AV_OPT_TYPE_STRING, .flags = FLAGS },
    { NULL },
};

static const AVClass scale_vaapi_class = {
    .class_name = "scale_vaapi",
    .item_name  = av_default_item_name,
    .option     = scale_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};
450 
451 static const AVFilterPad scale_vaapi_inputs[] = {
452  {
453  .name = "default",
454  .type = AVMEDIA_TYPE_VIDEO,
455  .filter_frame = &scale_vaapi_filter_frame,
456  .config_props = &scale_vaapi_config_input,
457  },
458  { NULL }
459 };
static const AVFilterPad scale_vaapi_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &scale_vaapi_config_output,
    },
    { NULL }
};
AVFilter ff_vf_scale_vaapi = {
    .name           = "scale_vaapi",
    .description    = NULL_IF_CONFIG_SMALL("Scale to/from VAAPI surfaces."),
    .priv_size      = sizeof(ScaleVAAPIContext),
    .init           = &scale_vaapi_init,
    .uninit         = &scale_vaapi_uninit,
    .query_formats  = &scale_vaapi_query_formats,
    .inputs         = scale_vaapi_inputs,
    .outputs        = scale_vaapi_outputs,
    .priv_class     = &scale_vaapi_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};
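
For reference, a minimal sketch of how an application might instantiate this filter programmatically. It is not part of vf_scale_vaapi.c: the helper name and option string are illustrative assumptions, and the filter's input link must already carry a VAAPI hw_frames_ctx (as scale_vaapi_config_input requires), for example from a buffer source configured with av_buffersrc_parameters_set().

#include <libavfilter/avfilter.h>
#include <libavutil/error.h>

// Hypothetical helper: create a scale_vaapi instance inside an existing graph.
// The option string maps onto the scale_vaapi_options table above
// ("w", "h" and "format", the software format of the output hardware frames).
static int create_scale_vaapi(AVFilterGraph *graph, AVFilterContext **out)
{
    const AVFilter *scaler = avfilter_get_by_name("scale_vaapi");
    if (!scaler)
        return AVERROR_FILTER_NOT_FOUND;

    return avfilter_graph_create_filter(out, scaler, "scale",
                                        "w=1280:h=720:format=nv12",
                                        NULL, graph);
}

On the ffmpeg command line the equivalent is typically something like -vf scale_vaapi=w=1280:h=720 after VAAPI decoding with -hwaccel_output_format vaapi.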