vf_scale_npp.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * scale video filter
22  */
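/*
 * Example (illustrative only): with a CUDA/NPP-enabled build, decoded CUDA
 * frames can be scaled on the GPU with something like
 *
 *     ffmpeg -hwaccel cuda -hwaccel_output_format cuda -i in.mp4 \
 *            -vf scale_npp=w=1280:h=720:format=nv12:interp_algo=lanczos \
 *            -c:v h264_nvenc out.mp4
 *
 * The option names (w, h, format, interp_algo, ...) are the AVOptions declared
 * at the bottom of this file; the rest of the command line is only a sketch.
 */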
23 
24 #include <nppi.h>
25 #include <stdio.h>
26 #include <string.h>
27 
28 #include "libavutil/hwcontext.h"
29 #include "libavutil/hwcontext_cuda_internal.h"
30 #include "libavutil/cuda_check.h"
31 #include "libavutil/internal.h"
32 #include "libavutil/opt.h"
33 #include "libavutil/parseutils.h"
34 #include "libavutil/eval.h"
35 #include "libavutil/pixdesc.h"
36 
37 #include "avfilter.h"
38 #include "formats.h"
39 #include "internal.h"
40 #include "scale_eval.h"
41 #include "video.h"
42 
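/* Wrapper for calls into the dynamically loaded CUDA driver API
 * (device_hwctx->internal->cuda_dl); failures are logged and turned into a
 * negative AVERROR code, so results can be checked with the usual ret < 0
 * pattern used below. */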
43 #define CHECK_CU(x) FF_CUDA_CHECK_DL(ctx, device_hwctx->internal->cuda_dl, x)
44 
45 static const enum AVPixelFormat supported_formats[] = {
46  AV_PIX_FMT_YUV420P,
47  AV_PIX_FMT_NV12,
48  AV_PIX_FMT_YUV444P,
49  AV_PIX_FMT_YUVA420P,
50 };
51 
52 static const enum AVPixelFormat deinterleaved_formats[][2] = {
53  { AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P },
54 };
55 
56 enum ScaleStage {
57  STAGE_DEINTERLEAVE,
58  STAGE_RESIZE,
59  STAGE_INTERLEAVE,
60  STAGE_NB,
61 };
62 
63 typedef struct NPPScaleStageContext {
64  int stage_needed;
65  enum AVPixelFormat in_fmt;
66  enum AVPixelFormat out_fmt;
67 
68  struct {
69  int width;
70  int height;
71  } planes_in[4], planes_out[4];
72 
73  AVBufferRef *frames_ctx;
74  AVFrame *frame;
75 } NPPScaleStageContext;
76 
77 static const char *const var_names[] = {
78  "in_w", "iw",
79  "in_h", "ih",
80  "out_w", "ow",
81  "out_h", "oh",
82  "a",
83  "sar",
84  "dar",
85  "n",
86  "t",
87 #if FF_API_FRAME_PKT
88  "pos",
89 #endif
90  "main_w",
91  "main_h",
92  "main_a",
93  "main_sar",
94  "main_dar", "mdar",
95  "main_n",
96  "main_t",
97 #if FF_API_FRAME_PKT
98  "main_pos",
99 #endif
100  NULL
101 };
102 
103 enum var_name {
104  VAR_IN_W, VAR_IW,
105  VAR_IN_H, VAR_IH,
106  VAR_OUT_W, VAR_OW,
107  VAR_OUT_H, VAR_OH,
108  VAR_A,
109  VAR_SAR,
110  VAR_DAR,
111  VAR_N,
112  VAR_T,
113 #if FF_API_FRAME_PKT
114  VAR_POS,
115 #endif
116  VAR_S2R_MAIN_W,
117  VAR_S2R_MAIN_H,
118  VAR_S2R_MAIN_A,
119  VAR_S2R_MAIN_SAR,
120  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
121  VAR_S2R_MAIN_N,
122  VAR_S2R_MAIN_T,
123 #if FF_API_FRAME_PKT
124  VAR_S2R_MAIN_POS,
125 #endif
126  VARS_NB
127 };
128 
129 enum EvalMode {
130  EVAL_MODE_INIT,
131  EVAL_MODE_FRAME,
132  EVAL_MODE_NB
133 };
134 
135 typedef struct NPPScaleContext {
136  const AVClass *class;
137 
138  NPPScaleStageContext stages[STAGE_NB];
139  AVFrame *tmp_frame;
140  int passthrough;
141 
142  int shift_width, shift_height;
143 
144  /**
145  * New dimensions. Special values are:
146  * 0 = original width/height
147  * -1 = keep original aspect
148  */
149  int w, h;
150 
151  /**
152  * Output sw format. AV_PIX_FMT_NONE for no conversion.
153  */
154  enum AVPixelFormat format;
155 
156  char *w_expr; ///< width expression string
157  char *h_expr; ///< height expression string
158  char *format_str;
159 
160  int force_original_aspect_ratio;
161  int force_divisible_by;
162 
163  int interp_algo;
164 
165  char* size_str;
166 
167  AVExpr* w_pexpr;
168  AVExpr* h_pexpr;
169 
170  double var_values[VARS_NB];
171 
172  int eval_mode;
173 } NPPScaleContext;
174 
175 const AVFilter ff_vf_scale2ref_npp;
176 
177 static int config_props(AVFilterLink *outlink);
178 
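/* Sanity-check the parsed width/height expressions: reject self-referencing
 * expressions, warn about circular w/h references, reject the scale2ref-only
 * main_* variables outside of scale2ref_npp, and reject per-frame variables
 * ('n', 't', 'pos') when eval_mode is 'init'. */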
179 static int check_exprs(AVFilterContext* ctx)
180 {
181  NPPScaleContext* scale = ctx->priv;
182  unsigned vars_w[VARS_NB] = {0}, vars_h[VARS_NB] = {0};
183 
184  if (!scale->w_pexpr && !scale->h_pexpr)
185  return AVERROR(EINVAL);
186 
187  if (scale->w_pexpr)
188  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
189  if (scale->h_pexpr)
190  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
191 
192  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
193  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
194  return AVERROR(EINVAL);
195  }
196 
197  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
198  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
199  return AVERROR(EINVAL);
200  }
201 
202  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
203  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
204  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
205  }
206 
207  if (ctx->filter != &ff_vf_scale2ref_npp &&
208  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
209  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
210  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
211  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
212  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
213  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
214  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
215  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T]
216 #if FF_API_FRAME_PKT
217  || vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]
218 #endif
219  )) {
220  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref_npp variables are not valid in scale_npp filter.\n");
221  return AVERROR(EINVAL);
222  }
223 
224  if (scale->eval_mode == EVAL_MODE_INIT &&
225  (vars_w[VAR_N] || vars_h[VAR_N] ||
226  vars_w[VAR_T] || vars_h[VAR_T] ||
227 #if FF_API_FRAME_PKT
228  vars_w[VAR_POS] || vars_h[VAR_POS] ||
229 #endif
230  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
231  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T]
232 #if FF_API_FRAME_PKT
233  || vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]
234 #endif
235  ) ) {
236  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', are not valid in init eval_mode.\n");
237  return AVERROR(EINVAL);
238  }
239 
240  return 0;
241 }
242 
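/* (Re)parse a single size expression option. The previous option string and
 * AVExpr are saved first so that, if parsing or validation fails, they can be
 * restored and the error propagated; once the filter is configured, a
 * successful re-parse also reconfigures the output link via config_props(). */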
243 static int nppscale_parse_expr(AVFilterContext* ctx, char* str_expr,
244  AVExpr** pexpr_ptr, const char* var,
245  const char* args)
246 {
247  NPPScaleContext* scale = ctx->priv;
248  int ret, is_inited = 0;
249  char* old_str_expr = NULL;
250  AVExpr* old_pexpr = NULL;
251 
252  if (str_expr) {
253  old_str_expr = av_strdup(str_expr);
254  if (!old_str_expr)
255  return AVERROR(ENOMEM);
256  av_opt_set(scale, var, args, 0);
257  }
258 
259  if (*pexpr_ptr) {
260  old_pexpr = *pexpr_ptr;
261  *pexpr_ptr = NULL;
262  is_inited = 1;
263  }
264 
265  ret = av_expr_parse(pexpr_ptr, args, var_names, NULL, NULL, NULL, NULL, 0,
266  ctx);
267  if (ret < 0) {
268  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var,
269  args);
270  goto revert;
271  }
272 
273  ret = check_exprs(ctx);
274  if (ret < 0)
275  goto revert;
276 
277  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
278  goto revert;
279 
280  av_expr_free(old_pexpr);
281  old_pexpr = NULL;
282  av_freep(&old_str_expr);
283 
284  return 0;
285 
286 revert:
287  av_expr_free(*pexpr_ptr);
288  *pexpr_ptr = NULL;
289  if (old_str_expr) {
290  av_opt_set(scale, var, old_str_expr, 0);
291  av_free(old_str_expr);
292  }
293  if (old_pexpr)
294  *pexpr_ptr = old_pexpr;
295 
296  return ret;
297 }
298 
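/* Filter init: resolve the output format string, translate the "s" size
 * option into "w"/"h", fall back to "iw"/"ih" for unset dimensions, parse
 * both expressions and allocate the per-stage and temporary frames. */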
299 static av_cold int nppscale_init(AVFilterContext* ctx)
300 {
301  NPPScaleContext* scale = ctx->priv;
302  int i, ret;
303 
304  if (!strcmp(scale->format_str, "same")) {
305  scale->format = AV_PIX_FMT_NONE;
306  } else {
307  scale->format = av_get_pix_fmt(scale->format_str);
308  if (scale->format == AV_PIX_FMT_NONE) {
309  av_log(ctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", scale->format_str);
310  return AVERROR(EINVAL);
311  }
312  }
313 
314  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
315  av_log(ctx, AV_LOG_ERROR,
316  "Size and width/height exprs cannot be set at the same time.\n");
317  return AVERROR(EINVAL);
318  }
319 
320  if (scale->w_expr && !scale->h_expr)
321  FFSWAP(char*, scale->w_expr, scale->size_str);
322 
323  if (scale->size_str) {
324  char buf[32];
325  ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str);
326  if (0 > ret) {
327  av_log(ctx, AV_LOG_ERROR, "Invalid size '%s'\n", scale->size_str);
328  return ret;
329  }
330 
331  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
332  ret = av_opt_set(scale, "w", buf, 0);
333  if (ret < 0)
334  return ret;
335 
336  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
337  ret = av_opt_set(scale, "h", buf, 0);
338  if (ret < 0)
339  return ret;
340  }
341 
342  if (!scale->w_expr) {
343  ret = av_opt_set(scale, "w", "iw", 0);
344  if (ret < 0)
345  return ret;
346  }
347 
348  if (!scale->h_expr) {
349  ret = av_opt_set(scale, "h", "ih", 0);
350  if (ret < 0)
351  return ret;
352  }
353 
354  ret = nppscale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
355  if (ret < 0)
356  return ret;
357 
358  ret = nppscale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
359  if (ret < 0)
360  return ret;
361 
362  for (i = 0; i < FF_ARRAY_ELEMS(scale->stages); i++) {
363  scale->stages[i].frame = av_frame_alloc();
364  if (!scale->stages[i].frame)
365  return AVERROR(ENOMEM);
366  }
367  scale->tmp_frame = av_frame_alloc();
368  if (!scale->tmp_frame)
369  return AVERROR(ENOMEM);
370 
371  return 0;
372 }
373 
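/* Evaluate the width/height expressions against the (reference) input link,
 * exposing in_w/in_h, a, sar, dar and, for scale2ref_npp, the main_* values.
 * The width expression is evaluated again after the height so that each may
 * refer to the other's computed value. */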
374 static int nppscale_eval_dimensions(AVFilterContext* ctx)
375 {
376  NPPScaleContext* scale = ctx->priv;
377  const char scale2ref = ctx->filter == &ff_vf_scale2ref_npp;
378  const AVFilterLink* inlink = ctx->inputs[scale2ref ? 1 : 0];
379  char* expr;
380  int eval_w, eval_h;
381  int ret;
382  double res;
383 
384  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
385  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
386  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
387  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
388  scale->var_values[VAR_A] = (double)inlink->w / inlink->h;
389  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
390  (double)inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
391  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
392 
393  if (scale2ref) {
394  const AVFilterLink* main_link = ctx->inputs[0];
395 
396  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
397  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
398  scale->var_values[VAR_S2R_MAIN_A] = (double)main_link->w / main_link->h;
399  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
400  (double)main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
401  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
402  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
403  }
404 
405  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
406  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int)res == 0 ? inlink->w : (int)res;
407 
408  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
409  if (isnan(res)) {
410  expr = scale->h_expr;
411  ret = AVERROR(EINVAL);
412  goto fail;
413  }
414  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int)res == 0 ? inlink->h : (int)res;
415 
416  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
417  if (isnan(res)) {
418  expr = scale->w_expr;
419  ret = AVERROR(EINVAL);
420  goto fail;
421  }
422  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int)res == 0 ? inlink->w : (int)res;
423 
424  scale->w = eval_w;
425  scale->h = eval_h;
426 
427  return 0;
428 
429 fail:
430  av_log(ctx, AV_LOG_ERROR, "Error when evaluating the expression '%s'.\n",
431  expr);
432  return ret;
433 }
434 
435 static void nppscale_uninit(AVFilterContext *ctx)
436 {
437  NPPScaleContext *s = ctx->priv;
438  int i;
439 
440  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
441  av_frame_free(&s->stages[i].frame);
442  av_buffer_unref(&s->stages[i].frames_ctx);
443  }
444  av_frame_free(&s->tmp_frame);
445 
446  av_expr_free(s->w_pexpr);
447  av_expr_free(s->h_pexpr);
448  s->w_pexpr = s->h_pexpr = NULL;
449 }
450 
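/* Allocate a CUDA hwframes context for one processing stage: derive the
 * per-plane dimensions from the chroma subsampling of the in/out formats
 * (plus the alpha plane for YUVA420P), align the pool to 32 pixels and
 * pre-allocate the stage's intermediate output frame. */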
451 static int init_stage(NPPScaleStageContext *stage, AVBufferRef *device_ctx)
452 {
453  AVBufferRef *out_ref = NULL;
454  AVHWFramesContext *out_ctx;
455  int in_sw, in_sh, out_sw, out_sh;
456  int ret, i;
457 
458  av_pix_fmt_get_chroma_sub_sample(stage->in_fmt, &in_sw, &in_sh);
459  av_pix_fmt_get_chroma_sub_sample(stage->out_fmt, &out_sw, &out_sh);
460  if (!stage->planes_out[0].width) {
461  stage->planes_out[0].width = stage->planes_in[0].width;
462  stage->planes_out[0].height = stage->planes_in[0].height;
463  }
464 
465  for (i = 1; i < FF_ARRAY_ELEMS(stage->planes_in); i++) {
466  stage->planes_in[i].width = stage->planes_in[0].width >> in_sw;
467  stage->planes_in[i].height = stage->planes_in[0].height >> in_sh;
468  stage->planes_out[i].width = stage->planes_out[0].width >> out_sw;
469  stage->planes_out[i].height = stage->planes_out[0].height >> out_sh;
470  }
471 
472  if (AV_PIX_FMT_YUVA420P == stage->in_fmt) {
473  stage->planes_in[3].width = stage->planes_in[0].width;
474  stage->planes_in[3].height = stage->planes_in[0].height;
475  stage->planes_out[3].width = stage->planes_out[0].width;
476  stage->planes_out[3].height = stage->planes_out[0].height;
477  }
478 
479  out_ref = av_hwframe_ctx_alloc(device_ctx);
480  if (!out_ref)
481  return AVERROR(ENOMEM);
482  out_ctx = (AVHWFramesContext*)out_ref->data;
483 
484  out_ctx->format = AV_PIX_FMT_CUDA;
485  out_ctx->sw_format = stage->out_fmt;
486  out_ctx->width = FFALIGN(stage->planes_out[0].width, 32);
487  out_ctx->height = FFALIGN(stage->planes_out[0].height, 32);
488 
489  ret = av_hwframe_ctx_init(out_ref);
490  if (ret < 0)
491  goto fail;
492 
493  av_frame_unref(stage->frame);
494  ret = av_hwframe_get_buffer(out_ref, stage->frame, 0);
495  if (ret < 0)
496  goto fail;
497 
498  stage->frame->width = stage->planes_out[0].width;
499  stage->frame->height = stage->planes_out[0].height;
500 
501  av_buffer_unref(&stage->frames_ctx);
502  stage->frames_ctx = out_ref;
503 
504  return 0;
505 fail:
506  av_buffer_unref(&out_ref);
507  return ret;
508 }
509 
510 static int format_is_supported(enum AVPixelFormat fmt)
511 {
512  int i;
513 
514  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++)
515  if (supported_formats[i] == fmt)
516  return 1;
517  return 0;
518 }
519 
520 static enum AVPixelFormat get_deinterleaved_format(enum AVPixelFormat fmt)
521 {
522  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(fmt);
523  int i, planes;
524 
525  planes = av_pix_fmt_count_planes(fmt);
526  if (planes == desc->nb_components)
527  return fmt;
528  for (i = 0; i < FF_ARRAY_ELEMS(deinterleaved_formats); i++)
529  if (deinterleaved_formats[i][0] == fmt)
530  return deinterleaved_formats[i][1];
531  return AV_PIX_FMT_NONE;
532 }
533 
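/* Decide which stages (deinterleave, resize, interleave) are required for the
 * requested conversion, validate the software formats, downgrade the "super"
 * interpolation algorithm when the output is not strictly smaller, and hook
 * the last stage's hwframes context up to the output link (or pass the input
 * context through untouched). */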
534 static int init_processing_chain(AVFilterContext *ctx, int in_width, int in_height,
535  int out_width, int out_height)
536 {
537  NPPScaleContext *s = ctx->priv;
538 
539  AVHWFramesContext *in_frames_ctx;
540 
541  enum AVPixelFormat in_format;
542  enum AVPixelFormat out_format;
543  enum AVPixelFormat in_deinterleaved_format;
544  enum AVPixelFormat out_deinterleaved_format;
545 
546  int i, ret, last_stage = -1;
547 
548  /* check that we have a hw context */
549  if (!ctx->inputs[0]->hw_frames_ctx) {
550  av_log(ctx, AV_LOG_ERROR, "No hw context provided on input\n");
551  return AVERROR(EINVAL);
552  }
553  in_frames_ctx = (AVHWFramesContext*)ctx->inputs[0]->hw_frames_ctx->data;
554  in_format = in_frames_ctx->sw_format;
555  out_format = (s->format == AV_PIX_FMT_NONE) ? in_format : s->format;
556 
557  if (!format_is_supported(in_format)) {
558  av_log(ctx, AV_LOG_ERROR, "Unsupported input format: %s\n",
559  av_get_pix_fmt_name(in_format));
560  return AVERROR(ENOSYS);
561  }
562  if (!format_is_supported(out_format)) {
563  av_log(ctx, AV_LOG_ERROR, "Unsupported output format: %s\n",
564  av_get_pix_fmt_name(out_format));
565  return AVERROR(ENOSYS);
566  }
567 
568  in_deinterleaved_format = get_deinterleaved_format(in_format);
569  out_deinterleaved_format = get_deinterleaved_format(out_format);
570  if (in_deinterleaved_format == AV_PIX_FMT_NONE ||
571  out_deinterleaved_format == AV_PIX_FMT_NONE)
572  return AVERROR_BUG;
573 
574  /* figure out which stages need to be done */
575  if (in_width != out_width || in_height != out_height ||
576  in_deinterleaved_format != out_deinterleaved_format) {
577  s->stages[STAGE_RESIZE].stage_needed = 1;
578 
579  if (s->interp_algo == NPPI_INTER_SUPER &&
580  (out_width > in_width && out_height > in_height)) {
581  s->interp_algo = NPPI_INTER_LANCZOS;
582  av_log(ctx, AV_LOG_WARNING, "super-sampling not supported for output dimensions, using lanczos instead.\n");
583  }
584  if (s->interp_algo == NPPI_INTER_SUPER &&
585  !(out_width < in_width && out_height < in_height)) {
586  s->interp_algo = NPPI_INTER_CUBIC;
587  av_log(ctx, AV_LOG_WARNING, "super-sampling not supported for output dimensions, using cubic instead.\n");
588  }
589  }
590 
591  if (!s->stages[STAGE_RESIZE].stage_needed && in_format == out_format)
592  s->passthrough = 1;
593 
594  if (!s->passthrough) {
595  if (in_format != in_deinterleaved_format)
596  s->stages[STAGE_DEINTERLEAVE].stage_needed = 1;
597  if (out_format != out_deinterleaved_format)
598  s->stages[STAGE_INTERLEAVE].stage_needed = 1;
599  }
600 
601  s->stages[STAGE_DEINTERLEAVE].in_fmt = in_format;
602  s->stages[STAGE_DEINTERLEAVE].out_fmt = in_deinterleaved_format;
603  s->stages[STAGE_DEINTERLEAVE].planes_in[0].width = in_width;
604  s->stages[STAGE_DEINTERLEAVE].planes_in[0].height = in_height;
605 
606  s->stages[STAGE_RESIZE].in_fmt = in_deinterleaved_format;
607  s->stages[STAGE_RESIZE].out_fmt = out_deinterleaved_format;
608  s->stages[STAGE_RESIZE].planes_in[0].width = in_width;
609  s->stages[STAGE_RESIZE].planes_in[0].height = in_height;
610  s->stages[STAGE_RESIZE].planes_out[0].width = out_width;
611  s->stages[STAGE_RESIZE].planes_out[0].height = out_height;
612 
613  s->stages[STAGE_INTERLEAVE].in_fmt = out_deinterleaved_format;
614  s->stages[STAGE_INTERLEAVE].out_fmt = out_format;
615  s->stages[STAGE_INTERLEAVE].planes_in[0].width = out_width;
616  s->stages[STAGE_INTERLEAVE].planes_in[0].height = out_height;
617 
618  /* init the hardware contexts */
619  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
620  if (!s->stages[i].stage_needed)
621  continue;
622 
623  ret = init_stage(&s->stages[i], in_frames_ctx->device_ref);
624  if (ret < 0)
625  return ret;
626 
627  last_stage = i;
628  }
629 
630  if (last_stage >= 0)
631  ctx->outputs[0]->hw_frames_ctx = av_buffer_ref(s->stages[last_stage].frames_ctx);
632  else
633  ctx->outputs[0]->hw_frames_ctx = av_buffer_ref(ctx->inputs[0]->hw_frames_ctx);
634 
635  if (!ctx->outputs[0]->hw_frames_ctx)
636  return AVERROR(ENOMEM);
637 
638  return 0;
639 }
640 
641 static int config_props(AVFilterLink *outlink)
642 {
643  AVFilterContext *ctx = outlink->src;
644  AVFilterLink *inlink0 = outlink->src->inputs[0];
645  AVFilterLink *inlink = ctx->filter == &ff_vf_scale2ref_npp ?
646  outlink->src->inputs[1] :
647  outlink->src->inputs[0];
648  NPPScaleContext *s = ctx->priv;
649  int ret;
650 
651  if ((ret = nppscale_eval_dimensions(ctx)) < 0)
652  goto fail;
653 
654  ff_scale_adjust_dimensions(inlink, &s->w, &s->h,
655  s->force_original_aspect_ratio,
656  s->force_divisible_by);
657 
658  if (s->w > INT_MAX || s->h > INT_MAX ||
659  (s->h * inlink->w) > INT_MAX ||
660  (s->w * inlink->h) > INT_MAX)
661  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
662 
663  outlink->w = s->w;
664  outlink->h = s->h;
665 
666  ret = init_processing_chain(ctx, inlink0->w, inlink0->h, outlink->w, outlink->h);
667  if (ret < 0)
668  return ret;
669 
670  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d -> w:%d h:%d\n",
671  inlink->w, inlink->h, outlink->w, outlink->h);
672 
673  if (inlink->sample_aspect_ratio.num)
674  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h*inlink->w,
675  outlink->w*inlink->h},
676  inlink->sample_aspect_ratio);
677  else
678  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
679 
680  return 0;
681 
682 fail:
683  return ret;
684 }
685 
686 static int config_props_ref(AVFilterLink *outlink)
687 {
688  AVFilterLink *inlink = outlink->src->inputs[1];
689  AVFilterContext *ctx = outlink->src;
690 
691  outlink->w = inlink->w;
692  outlink->h = inlink->h;
693  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
694  outlink->time_base = inlink->time_base;
695  outlink->frame_rate = inlink->frame_rate;
696 
697  ctx->outputs[1]->hw_frames_ctx = av_buffer_ref(ctx->inputs[1]->hw_frames_ctx);
698 
699  return 0;
700 }
701 
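/* Split an interleaved chroma layout into separate planes on the GPU;
 * currently only NV12 -> YUV420P via nppiYCbCr420_8u_P2P3R is handled. */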
702 static int nppscale_deinterleave(AVFilterContext *ctx, NPPScaleStageContext *stage,
703  AVFrame *out, AVFrame *in)
704 {
705  AVHWFramesContext *in_frames_ctx = (AVHWFramesContext*)in->hw_frames_ctx->data;
706  NppStatus err;
707 
708  switch (in_frames_ctx->sw_format) {
709  case AV_PIX_FMT_NV12:
710  err = nppiYCbCr420_8u_P2P3R(in->data[0], in->linesize[0],
711  in->data[1], in->linesize[1],
712  out->data, out->linesize,
713  (NppiSize){ in->width, in->height });
714  break;
715  default:
716  return AVERROR_BUG;
717  }
718  if (err != NPP_SUCCESS) {
719  av_log(ctx, AV_LOG_ERROR, "NPP deinterleave error: %d\n", err);
720  return AVERROR_UNKNOWN;
721  }
722 
723  return 0;
724 }
725 
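/* Resize every plane of the (planar) stage input with
 * nppiResizeSqrPixel_8u_C1R, using per-plane scale factors ow/iw and oh/ih
 * and the interpolation algorithm selected by the interp_algo option. */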
726 static int nppscale_resize(AVFilterContext *ctx, NPPScaleStageContext *stage,
727  AVFrame *out, AVFrame *in)
728 {
729  NPPScaleContext *s = ctx->priv;
730  NppStatus err;
731  int i;
732 
733  for (i = 0; i < FF_ARRAY_ELEMS(stage->planes_in) && i < FF_ARRAY_ELEMS(in->data) && in->data[i]; i++) {
734  int iw = stage->planes_in[i].width;
735  int ih = stage->planes_in[i].height;
736  int ow = stage->planes_out[i].width;
737  int oh = stage->planes_out[i].height;
738 
739  err = nppiResizeSqrPixel_8u_C1R(in->data[i], (NppiSize){ iw, ih },
740  in->linesize[i], (NppiRect){ 0, 0, iw, ih },
741  out->data[i], out->linesize[i],
742  (NppiRect){ 0, 0, ow, oh },
743  (double)ow / iw, (double)oh / ih,
744  0.0, 0.0, s->interp_algo);
745  if (err != NPP_SUCCESS) {
746  av_log(ctx, AV_LOG_ERROR, "NPP resize error: %d\n", err);
747  return AVERROR_UNKNOWN;
748  }
749  }
750 
751  return 0;
752 }
753 
754 static int nppscale_interleave(AVFilterContext *ctx, NPPScaleStageContext *stage,
755  AVFrame *out, AVFrame *in)
756 {
757  AVHWFramesContext *out_frames_ctx = (AVHWFramesContext*)out->hw_frames_ctx->data;
758  NppStatus err;
759 
760  switch (out_frames_ctx->sw_format) {
761  case AV_PIX_FMT_NV12:
762  err = nppiYCbCr420_8u_P3P2R((const uint8_t**)in->data,
763  in->linesize,
764  out->data[0], out->linesize[0],
765  out->data[1], out->linesize[1],
766  (NppiSize){ in->width, in->height });
767  break;
768  default:
769  return AVERROR_BUG;
770  }
771  if (err != NPP_SUCCESS) {
772  av_log(ctx, AV_LOG_ERROR, "NPP interleave error: %d\n", err);
773  return AVERROR_UNKNOWN;
774  }
775 
776  return 0;
777 }
778 
779 static int (*const nppscale_process[])(AVFilterContext *ctx, NPPScaleStageContext *stage,
780  AVFrame *out, AVFrame *in) = {
781  [STAGE_DEINTERLEAVE] = nppscale_deinterleave,
782  [STAGE_RESIZE] = nppscale_resize,
783  [STAGE_INTERLEAVE] = nppscale_interleave,
784 };
785 
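/* Run the enabled stages in order on the input frame, re-evaluating the size
 * expressions first when eval_mode is 'frame' or the input properties changed.
 * The final stage's cached frame is handed to the caller through 'out', and a
 * fresh buffer from the same pool replaces it so the stage can be reused. */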
786 static int nppscale_scale(AVFilterLink *link, AVFrame *out, AVFrame *in)
787 {
788  AVFilterContext *ctx = link->dst;
789  NPPScaleContext *s = ctx->priv;
790  AVFilterLink *outlink = ctx->outputs[0];
791  AVFrame *src = in;
792  char buf[32];
793  int i, ret, last_stage = -1;
794  int frame_changed;
795 
796  frame_changed = in->width != link->w ||
797  in->height != link->h ||
798  in->format != link->format ||
799  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
800  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
801 
802  if (s->eval_mode == EVAL_MODE_FRAME || frame_changed) {
803  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
804 
805  av_expr_count_vars(s->w_pexpr, vars_w, VARS_NB);
806  av_expr_count_vars(s->h_pexpr, vars_h, VARS_NB);
807 
808  if (s->eval_mode == EVAL_MODE_FRAME && !frame_changed && ctx->filter != &ff_vf_scale2ref_npp &&
809  !(vars_w[VAR_N] || vars_w[VAR_T]
810 #if FF_API_FRAME_PKT
811  || vars_w[VAR_POS]
812 #endif
813  ) &&
814  !(vars_h[VAR_N] || vars_h[VAR_T]
815 #if FF_API_FRAME_PKT
816  || vars_h[VAR_POS]
817 #endif
818  ) && s->w && s->h)
819  goto scale;
820 
821  if (s->eval_mode == EVAL_MODE_INIT) {
822  snprintf(buf, sizeof(buf)-1, "%d", outlink->w);
823  av_opt_set(s, "w", buf, 0);
824  snprintf(buf, sizeof(buf)-1, "%d", outlink->h);
825  av_opt_set(s, "h", buf, 0);
826 
827  ret = nppscale_parse_expr(ctx, NULL, &s->w_pexpr, "width", s->w_expr);
828  if (ret < 0)
829  return ret;
830 
831  ret = nppscale_parse_expr(ctx, NULL, &s->h_pexpr, "height", s->h_expr);
832  if (ret < 0)
833  return ret;
834  }
835 
836  if (ctx->filter == &ff_vf_scale2ref_npp) {
837  s->var_values[VAR_S2R_MAIN_N] = link->frame_count_out;
838  s->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
839 #if FF_API_FRAME_PKT
840 FF_DISABLE_DEPRECATION_WARNINGS
841  s->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
842 FF_ENABLE_DEPRECATION_WARNINGS
843 #endif
844  } else {
845  s->var_values[VAR_N] = link->frame_count_out;
846  s->var_values[VAR_T] = TS2T(in->pts, link->time_base);
847 #if FF_API_FRAME_PKT
848 FF_DISABLE_DEPRECATION_WARNINGS
849  s->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
850 FF_ENABLE_DEPRECATION_WARNINGS
851 #endif
852  }
853 
854  link->format = in->format;
855  link->w = in->width;
856  link->h = in->height;
857 
858  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
859  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
860 
861  if ((ret = config_props(outlink)) < 0)
862  return ret;
863  }
864 
865 scale:
866  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
867  if (!s->stages[i].stage_needed)
868  continue;
869 
870  ret = nppscale_process[i](ctx, &s->stages[i], s->stages[i].frame, src);
871  if (ret < 0)
872  return ret;
873 
874  src = s->stages[i].frame;
875  last_stage = i;
876  }
877  if (last_stage < 0)
878  return AVERROR_BUG;
879 
880  ret = av_hwframe_get_buffer(src->hw_frames_ctx, s->tmp_frame, 0);
881  if (ret < 0)
882  return ret;
883 
884  s->tmp_frame->width = src->width;
885  s->tmp_frame->height = src->height;
886 
887  av_frame_move_ref(out, src);
888  av_frame_move_ref(src, s->tmp_frame);
889 
890  ret = av_frame_copy_props(out, in);
891  if (ret < 0)
892  return ret;
893 
894  return 0;
895 }
896 
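/* filter_frame callback for the main input: passthrough when no conversion is
 * needed, otherwise push the CUDA context, run nppscale_scale() and fix up the
 * output sample aspect ratio before forwarding the frame downstream. */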
897 static int nppscale_filter_frame(AVFilterLink *link, AVFrame *in)
898 {
899  AVFilterContext *ctx = link->dst;
900  NPPScaleContext *s = ctx->priv;
901  AVFilterLink *outlink = ctx->outputs[0];
902  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)outlink->hw_frames_ctx->data;
903  AVCUDADeviceContext *device_hwctx = frames_ctx->device_ctx->hwctx;
904 
905  AVFrame *out = NULL;
906  CUcontext dummy;
907  int ret = 0;
908 
909  if (s->passthrough)
910  return ff_filter_frame(outlink, in);
911 
912  out = av_frame_alloc();
913  if (!out) {
914  ret = AVERROR(ENOMEM);
915  goto fail;
916  }
917 
918  ret = CHECK_CU(device_hwctx->internal->cuda_dl->cuCtxPushCurrent(device_hwctx->cuda_ctx));
919  if (ret < 0)
920  goto fail;
921 
922  ret = nppscale_scale(link, out, in);
923 
924  CHECK_CU(device_hwctx->internal->cuda_dl->cuCtxPopCurrent(&dummy));
925  if (ret < 0)
926  goto fail;
927 
928  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
929  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
930  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
931  INT_MAX);
932 
933  av_frame_free(&in);
934  return ff_filter_frame(outlink, out);
935 fail:
936  av_frame_free(&in);
937  av_frame_free(&out);
938  return ret;
939 }
940 
941 static int nppscale_filter_frame_ref(AVFilterLink *link, AVFrame *in)
942 {
943  NPPScaleContext *scale = link->dst->priv;
944  AVFilterLink *outlink = link->dst->outputs[1];
945  int frame_changed;
946 
947  frame_changed = in->width != link->w ||
948  in->height != link->h ||
949  in->format != link->format ||
950  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
951  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
952 
953  if (frame_changed) {
954  link->format = in->format;
955  link->w = in->width;
956  link->h = in->height;
957  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
958  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
959 
960  config_props_ref(outlink);
961  }
962 
963  if (scale->eval_mode == EVAL_MODE_FRAME) {
964  scale->var_values[VAR_N] = link->frame_count_out;
965  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
966 #if FF_API_FRAME_PKT
967 FF_DISABLE_DEPRECATION_WARNINGS
968  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
969 FF_ENABLE_DEPRECATION_WARNINGS
970 #endif
971  }
972 
973  return ff_filter_frame(outlink, in);
974 }
975 
976 static int request_frame(AVFilterLink *outlink)
977 {
978  return ff_request_frame(outlink->src->inputs[0]);
979 }
980 
981 static int request_frame_ref(AVFilterLink *outlink)
982 {
983  return ff_request_frame(outlink->src->inputs[1]);
984 }
985 
986 #define OFFSET(x) offsetof(NPPScaleContext, x)
987 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
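/* Filter options: "w"/"h" accept the expression variables listed in var_names
 * above, "interp_algo" maps directly to the NPPI_INTER_* constants, and "eval"
 * selects whether expressions are evaluated once at init or per frame. */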
988 static const AVOption options[] = {
989  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = FLAGS },
990  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = FLAGS },
991  { "format", "Output pixel format", OFFSET(format_str), AV_OPT_TYPE_STRING, { .str = "same" }, .flags = FLAGS },
992  { "s", "Output video size", OFFSET(size_str), AV_OPT_TYPE_STRING, { .str = NULL }, .flags = FLAGS },
993 
994  { "interp_algo", "Interpolation algorithm used for resizing", OFFSET(interp_algo), AV_OPT_TYPE_INT, { .i64 = NPPI_INTER_CUBIC }, 0, INT_MAX, FLAGS, .unit = "interp_algo" },
995  { "nn", "nearest neighbour", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_NN }, 0, 0, FLAGS, .unit = "interp_algo" },
996  { "linear", "linear", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_LINEAR }, 0, 0, FLAGS, .unit = "interp_algo" },
997  { "cubic", "cubic", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC }, 0, 0, FLAGS, .unit = "interp_algo" },
998  { "cubic2p_bspline", "2-parameter cubic (B=1, C=0)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_BSPLINE }, 0, 0, FLAGS, .unit = "interp_algo" },
999  { "cubic2p_catmullrom", "2-parameter cubic (B=0, C=1/2)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_CATMULLROM }, 0, 0, FLAGS, .unit = "interp_algo" },
1000  { "cubic2p_b05c03", "2-parameter cubic (B=1/2, C=3/10)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_B05C03 }, 0, 0, FLAGS, .unit = "interp_algo" },
1001  { "super", "supersampling", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_SUPER }, 0, 0, FLAGS, .unit = "interp_algo" },
1002  { "lanczos", "Lanczos", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_LANCZOS }, 0, 0, FLAGS, .unit = "interp_algo" },
1003  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 2, FLAGS, .unit = "force_oar" },
1004  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, .unit = "force_oar" },
1005  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, .unit = "force_oar" },
1006  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, .unit = "force_oar" },
1007  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 256, FLAGS },
1008  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, { .i64 = EVAL_MODE_INIT }, 0, EVAL_MODE_NB-1, FLAGS, .unit = "eval" },
1009  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, { .i64 = EVAL_MODE_INIT }, 0, 0, FLAGS, .unit = "eval" },
1010  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, { .i64 = EVAL_MODE_FRAME }, 0, 0, FLAGS, .unit = "eval" },
1011  { NULL },
1012 };
1013 
1014 static const AVClass nppscale_class = {
1015  .class_name = "nppscale",
1016  .item_name = av_default_item_name,
1017  .option = options,
1018  .version = LIBAVUTIL_VERSION_INT,
1019  .category = AV_CLASS_CATEGORY_FILTER,
1020 };
1021 
1022 static const AVFilterPad nppscale_inputs[] = {
1023  {
1024  .name = "default",
1025  .type = AVMEDIA_TYPE_VIDEO,
1026  .filter_frame = nppscale_filter_frame,
1027  }
1028 };
1029 
1030 static const AVFilterPad nppscale_outputs[] = {
1031  {
1032  .name = "default",
1033  .type = AVMEDIA_TYPE_VIDEO,
1034  .config_props = config_props,
1035  }
1036 };
1037 
1038 const AVFilter ff_vf_scale_npp = {
1039  .name = "scale_npp",
1040  .description = NULL_IF_CONFIG_SMALL("NVIDIA Performance Primitives video "
1041  "scaling and format conversion"),
1042 
1043  .init = nppscale_init,
1044  .uninit = nppscale_uninit,
1045 
1046  .priv_size = sizeof(NPPScaleContext),
1047  .priv_class = &nppscale_class,
1048 
1049  FILTER_INPUTS(nppscale_inputs),
1050  FILTER_OUTPUTS(nppscale_outputs),
1051 
1052  FILTER_SINGLE_PIXFMT(AV_PIX_FMT_CUDA),
1053 
1054  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1055 };
1056 
1057 static const AVFilterPad nppscale2ref_inputs[] = {
1058  {
1059  .name = "default",
1060  .type = AVMEDIA_TYPE_VIDEO,
1061  .filter_frame = nppscale_filter_frame,
1062  },
1063  {
1064  .name = "ref",
1065  .type = AVMEDIA_TYPE_VIDEO,
1066  .filter_frame = nppscale_filter_frame_ref,
1067  }
1068 };
1069 
1070 static const AVFilterPad nppscale2ref_outputs[] = {
1071  {
1072  .name = "default",
1073  .type = AVMEDIA_TYPE_VIDEO,
1074  .config_props = config_props,
1075  .request_frame= request_frame,
1076  },
1077  {
1078  .name = "ref",
1079  .type = AVMEDIA_TYPE_VIDEO,
1080  .config_props = config_props_ref,
1081  .request_frame= request_frame_ref,
1082  }
1083 };
1084 
1085 const AVFilter ff_vf_scale2ref_npp = {
1086  .name = "scale2ref_npp",
1087  .description = NULL_IF_CONFIG_SMALL("NVIDIA Performance Primitives video "
1088  "scaling and format conversion to the "
1089  "given reference."),
1090 
1091  .init = nppscale_init,
1092  .uninit = nppscale_uninit,
1093 
1094  .priv_size = sizeof(NPPScaleContext),
1095  .priv_class = &nppscale_class,
1096 
1097  FILTER_INPUTS(nppscale2ref_inputs),
1098  FILTER_OUTPUTS(nppscale2ref_outputs),
1099 
1100  FILTER_SINGLE_PIXFMT(AV_PIX_FMT_CUDA),
1101 
1102  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1103 };