FFmpeg
vsrc_amf.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include "config.h"

#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
#include "compat/w32dlfcn.h"
#include "avfilter.h"
#include "filters.h"
#include "video.h"

#include <AMF/core/Factory.h>
#include <AMF/core/Surface.h>
#include <AMF/components/ColorSpace.h>
#include <AMF/components/DisplayCapture.h>
39 
40 typedef struct AMFGrabContext {
42 
45  amf_bool duplicate_output;
47 
49 
50  AMFComponent *capture;
51  amf_bool eof;
52  AMF_SURFACE_FORMAT format;
53  void *winmmdll;
54  amf_uint32 timerPrecision;
56 
57 #define OFFSET(x) offsetof(AMFGrabContext, x)
58 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
59 
/* User-visible options for the AMF screen-capture source; offsets resolve
 * into AMFGrabContext.  "capture_mode" exposes the AMF_DISPLAYCAPTURE_MODE_*
 * constants as named values under the "mode" unit. */
static const AVOption amf_capture_options[] = {
    { "monitor_index", "Index of display monitor to capture", OFFSET(monitor_index), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 8, FLAGS },
    { "framerate", "Capture framerate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "60"}, 0, INT_MAX, FLAGS },
    { "duplicate_output", "Use display output duplication for screen capture", OFFSET(duplicate_output), AV_OPT_TYPE_BOOL, {.i64 = 1}, 0, 1, FLAGS },

    { "capture_mode", "Capture synchronization mode", OFFSET(capture_mode), AV_OPT_TYPE_INT, {.i64 = AMF_DISPLAYCAPTURE_MODE_KEEP_FRAMERATE}, 0, 2, FLAGS, "mode" },
    { "keep_framerate", "Capture component maintains the frame rate", 0, AV_OPT_TYPE_CONST, {.i64 = AMF_DISPLAYCAPTURE_MODE_KEEP_FRAMERATE}, 0, 0, FLAGS, "mode" },
    { "wait_for_present", "Capture component waits for flip (present) event", 0, AV_OPT_TYPE_CONST, {.i64 = AMF_DISPLAYCAPTURE_MODE_WAIT_FOR_PRESENT}, 0, 0, FLAGS, "mode" },
    { "get_current", "Returns current visible surface immediately", 0, AV_OPT_TYPE_CONST, {.i64 = AMF_DISPLAYCAPTURE_MODE_GET_CURRENT_SURFACE}, 0, 0, FLAGS, "mode" },
    { NULL }
};
71 
72 AVFILTER_DEFINE_CLASS(amf_capture);
73 
74 // need to increase precision for capture timing accuracy
75 #if defined (_WIN32)
76 
77 #include <timeapi.h>
78 
79 typedef WINMMAPI MMRESULT (WINAPI *timeBeginPeriod_fn)( UINT uPeriod);
80 typedef WINMMAPI MMRESULT (WINAPI *timeEndPeriod_fn)(UINT uPeriod);
81 
82 static void amf_increase_timer_precision(AMFGrabContext *ctx)
83 {
84  ctx->winmmdll = dlopen("Winmm.dll", 0);
85  if(ctx->winmmdll){
86  timeBeginPeriod_fn fn = (timeBeginPeriod_fn)dlsym(ctx->winmmdll, "timeBeginPeriod");
87  if(fn){
88  ctx->timerPrecision = 1;
89  while (fn(ctx->timerPrecision) == TIMERR_NOCANDO)
90  {
91  ++ctx->timerPrecision;
92  }
93  }
94  }
95 }
96 static void amf_restore_timer_precision(AMFGrabContext *ctx)
97 {
98  if(ctx->winmmdll){
99  timeEndPeriod_fn fn = (timeEndPeriod_fn)dlsym(ctx->winmmdll, "timeEndPeriod");
100  if(fn)
101  fn(ctx->timerPrecision);
102  dlclose(ctx->winmmdll);
103  ctx->winmmdll = 0;
104  }
105 }
106 #endif
107 
108 static void amf_release_surface(void *opaque, uint8_t *data)
109 {
110  int ref = 0;
111  if(!!data){
112  AMFInterface *surface = (AMFInterface*)(data);
113  if (surface && surface->pVtbl)
114  ref = surface->pVtbl->Release(surface);
115  }
116 }
117 
/**
 * Filter uninit: tear down the capture component, release the device
 * context reference, and (on Windows) restore the system timer precision.
 * Safe to call multiple times — ctx->capture is cleared after release.
 */
static av_cold void amf_uninit(AVFilterContext *avctx)
{
    AMFGrabContext *ctx = avctx->priv;

    if (ctx->capture) {
        /* AMF component teardown order: drain pending output, then
         * terminate, then drop our reference. */
        ctx->capture->pVtbl->Drain(ctx->capture);
        ctx->capture->pVtbl->Terminate(ctx->capture);
        ctx->capture->pVtbl->Release(ctx->capture);
        ctx->capture = NULL;
    }

    av_buffer_unref(&ctx->device_ctx_ref);
#if defined (_WIN32)
    amf_restore_timer_precision(ctx);
#endif
}
134 
135 static av_cold int amf_init(AVFilterContext *avctx)
136 {
137  AMFGrabContext *ctx = avctx->priv;
138 #if defined (_WIN32)
139  amf_increase_timer_precision(ctx);
140 #endif
141  ctx->eof = 0;
142  av_log(avctx, AV_LOG_VERBOSE, "Initializing AMF screen capture\n");
143 
144  return 0;
145 }
146 
147 static int amf_init_vsrc(AVFilterLink *outlink)
148 {
149  FilterLink *link = ff_filter_link(outlink);
150  AVFilterContext *avctx = outlink->src;
151  AMFGrabContext *ctx = avctx->priv;
152  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
153  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext*)hw_device_ctx->hwctx;
154  AMF_RESULT res;
155  AMFRate framerate;
156  AMFVariantStruct var = {0};
157  AMFSize resolution = {0};
158 
159  res = amf_device_ctx->factory->pVtbl->CreateComponent(amf_device_ctx->factory,
160  amf_device_ctx->context,
161  AMFDisplayCapture,
162  &ctx->capture);
163  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_FILTER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", AMFDisplayCapture, res);
164 
165  AMF_ASSIGN_PROPERTY_INT64(res, ctx->capture, AMF_DISPLAYCAPTURE_MONITOR_INDEX, ctx->monitor_index);
166  if (res != AMF_OK) {
167  av_log(avctx, AV_LOG_ERROR, "Failed to set monitor index: %d\n", res);
168  return AVERROR_EXTERNAL;
169  }
170 
171  if (ctx->framerate.num > 0 && ctx->framerate.den > 0)
172  framerate = AMFConstructRate(ctx->framerate.num, ctx->framerate.den);
173  else
174  framerate = AMFConstructRate(30, 1);
175 
176  AMF_ASSIGN_PROPERTY_BOOL(res, ctx->capture, AMF_DISPLAYCAPTURE_DUPLICATEOUTPUT, ctx->duplicate_output);
177  if (res != AMF_OK) {
178  av_log(avctx, AV_LOG_ERROR, "Failed to set AMF_DISPLAYCAPTURE_DUPLICATEOUTPUT: %d\n", res);
179  return AVERROR_EXTERNAL;
180  }
181 
182  AMF_ASSIGN_PROPERTY_RATE(res, ctx->capture, AMF_DISPLAYCAPTURE_FRAMERATE, framerate);
183  if (res != AMF_OK) {
184  av_log(avctx, AV_LOG_ERROR, "Failed to set framerate: %d\n", res);
185  return AVERROR_EXTERNAL;
186  }
187 
188  AMF_ASSIGN_PROPERTY_INT64(res, ctx->capture, AMF_DISPLAYCAPTURE_MODE, ctx->capture_mode);
189  if (res != AMF_OK) {
190  av_log(avctx, AV_LOG_WARNING, "Failed to set capture mode: %d\n", res);
191  }
192 
193  res = ctx->capture->pVtbl->Init(ctx->capture, AMF_SURFACE_UNKNOWN, 0, 0);
194  if (res != AMF_OK) {
195  av_log(avctx, AV_LOG_ERROR, "Failed to initialize capture component: %d\n", res);
196  return AVERROR_EXTERNAL;
197  }
198 
199  res = ctx->capture->pVtbl->GetProperty(ctx->capture, AMF_DISPLAYCAPTURE_RESOLUTION, &var);
200  if (res == AMF_OK && var.type == AMF_VARIANT_SIZE) {
201  resolution = var.sizeValue;
202  outlink->w = resolution.width;
203  outlink->h = resolution.height;
204 
205  av_log(avctx, AV_LOG_INFO, "Capture resolution: %dx%d\n",
206  outlink->w, outlink->h);
207  } else {
208  av_log(avctx, AV_LOG_ERROR, "Failed to get capture resolution from AMF\n");
209  AMFVariantClear(&var);
210  return AVERROR_EXTERNAL;
211  }
212 
213  res = ctx->capture->pVtbl->GetProperty(ctx->capture, AMF_DISPLAYCAPTURE_FORMAT, &var);
214  if (res == AMF_OK && var.type == AMF_VARIANT_INT64) {
215  ctx->format = (AMF_SURFACE_FORMAT)var.int64Value;
216  av_log(avctx, AV_LOG_INFO, "Capture format: %d\n", ctx->format);
217  } else {
218  ctx->format = AMF_SURFACE_BGRA;
219  av_log(avctx, AV_LOG_WARNING, "Failed to get format, assuming BGRA\n");
220  }
221 
222 
223  outlink->time_base = (AVRational){framerate.den, framerate.num};
224  link->frame_rate = (AVRational){framerate.num, framerate.den};
225  AMFVariantClear(&var);
226  return 0;
227 }
228 
229 static int amf_config_props(AVFilterLink *outlink)
230 {
231  FilterLink *link = ff_filter_link(outlink);
232  AVFilterContext *avctx = outlink->src;
233  AMFGrabContext *ctx = avctx->priv;
234  AVHWDeviceContext *device_ctx;
235  int ret;
236  int pool_size = 1;
237 
238  av_buffer_unref(&ctx->device_ctx_ref);
239 
240  if (avctx->hw_device_ctx) {
241  device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
242  if (device_ctx->type == AV_HWDEVICE_TYPE_AMF)
243  {
244  ctx->device_ctx_ref = av_buffer_ref(avctx->hw_device_ctx);
245  } else {
247  AMF_GOTO_FAIL_IF_FALSE(avctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
248  }
249  } else {
250  ret = av_hwdevice_ctx_create(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
251  AMF_GOTO_FAIL_IF_FALSE(avctx, ret == 0, ret, "Failed to create hardware device context (AMF) : %s\n", av_err2str(ret));
252  }
253  if ((ret = amf_init_vsrc(outlink)) == 0) {
254  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
255  if (device_ctx->type == AV_HWDEVICE_TYPE_AMF) {
256  AVHWFramesContext *frames_ctx;
257  link->hw_frames_ctx = av_hwframe_ctx_alloc(ctx->device_ctx_ref);
258  AMF_GOTO_FAIL_IF_FALSE(avctx, !!link->hw_frames_ctx, AVERROR(ENOMEM), "av_hwframe_ctx_alloc failed\n");
259 
260  frames_ctx = (AVHWFramesContext*)link->hw_frames_ctx->data;
261  frames_ctx->format = AV_PIX_FMT_AMF_SURFACE;
262  frames_ctx->sw_format = av_amf_to_av_format(ctx->format);
263  frames_ctx->initial_pool_size = pool_size;
264  if (avctx->extra_hw_frames > 0)
265  frames_ctx->initial_pool_size += avctx->extra_hw_frames;
266 
267  frames_ctx->width = outlink->w;
268  frames_ctx->height = outlink->h;
269 
271  if (ret < 0) {
272  av_log(avctx, AV_LOG_ERROR, "Failed to initialize hardware frames context: %s\n",
273  av_err2str(ret));
274 
275  return ret;
276  }
277 
278  if (!link->hw_frames_ctx)
279  return AVERROR(ENOMEM);
280  }
281  return 0;
282  }
283 fail:
284  amf_uninit(avctx);
285  return ret;
286 }
287 
288 static int amf_capture_frame(AVFilterLink *outlink)
289 {
290  AVFilterContext *avctx = outlink->src;
291  AMFGrabContext *ctx = avctx->priv;
292  AMFSurface *surface = NULL;
293  AVFrame *frame = NULL;
294  AMF_RESULT res;
295  AMFData *data_out = NULL;
296  FilterLink *fl = ff_filter_link(outlink);
297  int format_amf;
298  int i;
299  int ret;
300  AMFPlane *plane;
301 
302  if (ctx->eof)
303  return AVERROR_EOF;
304 
305  res = ctx->capture->pVtbl->QueryOutput(ctx->capture, &data_out);
306 
307  if (res == AMF_REPEAT) {
308  av_log(0, AV_LOG_DEBUG, "AMF capture returned res = AMF_REPEAT\n");
309  return AVERROR(EAGAIN);
310  }
311 
312  if (res == AMF_EOF) {
313  ctx->eof = 1;
314  av_log(avctx, AV_LOG_DEBUG, "Capture reached EOF\n");
315  return AVERROR_EOF;
316  }
317 
318  if (res != AMF_OK || !data_out) {
319  if (res != AMF_OK)
320  av_log(avctx, AV_LOG_WARNING, "QueryOutput failed: %d\n", res);
321 
322  return AVERROR(EAGAIN);
323  }
324 
325  AMFGuid guid = IID_AMFSurface();
326  ret = data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface);
327  data_out->pVtbl->Release(data_out);
328  if (ret != AMF_OK || !surface) {
329  av_log(avctx, AV_LOG_ERROR, "QueryInterface(IID_AMFSurface) failed: %d\n", ret);
330  return AVERROR(EAGAIN);
331  }
332 
333  frame = av_frame_alloc();
334  if (!frame) {
335  surface->pVtbl->Release(surface);
336  return AVERROR(ENOMEM);
337  }
338  frame->format = outlink->format;
339  frame->width = outlink->w;
340  frame->height = outlink->h;
341  frame->sample_aspect_ratio = (AVRational){1, 1};
342 
343  amf_pts pts = surface->pVtbl->GetPts(surface);
344  frame->pts = av_rescale_q(pts, AMF_TIME_BASE_Q, outlink->time_base);
345 
346  if (fl->hw_frames_ctx) {
347  frame->format = AV_PIX_FMT_AMF_SURFACE;
348  frame->data[0] = (uint8_t*)surface;
349  frame->buf[0] = av_buffer_create((uint8_t*)surface, sizeof(surface),
351  frame->hw_frames_ctx = av_buffer_ref(fl->hw_frames_ctx);
352  if (!frame->buf[0]) {
354  surface->pVtbl->Release(surface);
355  return AVERROR(ENOMEM);
356  }
357  } else {
358  ret = surface->pVtbl->Convert(surface, AMF_MEMORY_HOST);
359  AMF_RETURN_IF_FALSE(avctx, ret == AMF_OK, AVERROR_UNKNOWN, "Convert(amf::AMF_MEMORY_HOST) failed with error %d\n", ret);
360 
361  for (i = 0; i < surface->pVtbl->GetPlanesCount(surface); i++) {
362  plane = surface->pVtbl->GetPlaneAt(surface, i);
363  frame->data[i] = plane->pVtbl->GetNative(plane);
364  frame->linesize[i] = plane->pVtbl->GetHPitch(plane);
365  }
366 
367  frame->buf[0] = av_buffer_create((uint8_t *)surface, sizeof(surface),
368  amf_release_surface, (void*)avctx,
370  AMF_RETURN_IF_FALSE(avctx, !!frame->buf[0], AVERROR(ENOMEM), "av_buffer_create for amf surface failed.");
371 
372  format_amf = surface->pVtbl->GetFormat(surface);
373  frame->format = av_amf_to_av_format(format_amf);
374  }
375 
376  return ff_filter_frame(outlink, frame);
377 }
378 
/* Single video output pad; frames are produced on demand by
 * amf_capture_frame() and the link is configured by amf_config_props(). */
static const AVFilterPad amf_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .request_frame = amf_capture_frame,
        .config_props = amf_config_props,
    },
};
387 
389  .p.name = "vsrc_amf",
390  .p.description = NULL_IF_CONFIG_SMALL("AMD AMF screen capture"),
391  .p.priv_class = &amf_capture_class,
392  .p.inputs = NULL,
393  .p.flags = AVFILTER_FLAG_HWDEVICE,
394  .priv_size = sizeof(AMFGrabContext),
395  .init = amf_init,
396  .uninit = amf_uninit,
399  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
400 };
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1067
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AV_OPT_TYPE_VIDEO_RATE
@ AV_OPT_TYPE_VIDEO_RATE
Underlying C type is AVRational.
Definition: opt.h:315
FLAGS
#define FLAGS
Definition: vsrc_amf.c:58
AMFGrabContext::timerPrecision
amf_uint32 timerPrecision
Definition: vsrc_amf.c:54
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:337
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
pixdesc.h
AMFGrabContext::framerate
AVRational framerate
Definition: vsrc_amf.c:44
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:263
amf_config_props
static int amf_config_props(AVFilterLink *outlink)
Definition: vsrc_amf.c:229
AVOption
AVOption.
Definition: opt.h:429
data
const char data[16]
Definition: mxf.c:149
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:226
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:356
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:169
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:220
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:220
video.h
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
AMFGrabContext::capture
AMFComponent * capture
Definition: vsrc_amf.c:50
AVFilterContext::priv
void * priv
private data for use by the filter
Definition: avfilter.h:289
fail
#define fail()
Definition: checkasm.h:216
resolution
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted then it is pulled from the input slice through the input converter and horizontal scaler The result is also stored in the ring buffer to serve future vertical scaler requests When no more output can be generated because lines from a future slice would be then all remaining lines in the current slice are horizontally scaled and put in the ring buffer[This is done for luma and chroma, each with possibly different numbers of lines per picture.] Input to YUV Converter When the input to the main path is not planar bits per component YUV or bit it is converted to planar bit YUV Two sets of converters exist for this the other leaves the full chroma resolution
Definition: swscale.txt:54
AMFGrabContext::winmmdll
void * winmmdll
Definition: vsrc_amf.c:53
av_amf_to_av_format
enum AVPixelFormat av_amf_to_av_format(enum AMF_SURFACE_FORMAT fmt)
Definition: hwcontext_amf.c:142
AVFilterContext::extra_hw_frames
int extra_hw_frames
Sets the number of extra hardware frames which the filter will allocate on its output links for use i...
Definition: avfilter.h:380
pts
static int64_t pts
Definition: transcode_aac.c:644
amf_capture_frame
static int amf_capture_frame(AVFilterLink *outlink)
Definition: vsrc_amf.c:288
amf_outputs
static const AVFilterPad amf_outputs[]
Definition: vsrc_amf.c:379
AMFGrabContext
Definition: vsrc_amf.c:40
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:39
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
fn
Definition: ops_tmpl_float.c:122
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
av_cold
#define av_cold
Definition: attributes.h:106
AVHWFramesContext::height
int height
Definition: hwcontext.h:220
FFFilter
Definition: filters.h:266
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
AMFGrabContext::device_ctx_ref
AVBufferRef * device_ctx_ref
Definition: vsrc_amf.c:48
amf_init_vsrc
static int amf_init_vsrc(AVFilterLink *outlink)
Definition: vsrc_amf.c:147
filters.h
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:231
ctx
AVFormatContext * ctx
Definition: movenc.c:49
AMF_GOTO_FAIL_IF_FALSE
#define AMF_GOTO_FAIL_IF_FALSE(avctx, exp, ret_value,...)
Definition: hwcontext_amf_internal.h:34
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
hwcontext_amf.h
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:264
AMFGrabContext::capture_mode
int capture_mode
Definition: vsrc_amf.c:46
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
AMF_TIME_BASE_Q
#define AMF_TIME_BASE_Q
Definition: hwcontext_amf_internal.h:41
AMFGrabContext::duplicate_output
amf_bool duplicate_output
Definition: vsrc_amf.c:45
if
if(ret)
Definition: filter_design.txt:179
AMFGrabContext::monitor_index
int monitor_index
Definition: vsrc_amf.c:43
framerate
float framerate
Definition: av1_levels.c:29
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
AMFGrabContext::format
AMF_SURFACE_FORMAT format
Definition: vsrc_amf.c:52
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:213
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
time.h
OFFSET
#define OFFSET(x)
Definition: vsrc_amf.c:57
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:198
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
FF_FILTER_FLAG_HWFRAME_AWARE
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: filters.h:207
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:550
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(amf_capture)
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
AVFILTER_FLAG_HWDEVICE
#define AVFILTER_FLAG_HWDEVICE
The filter can create hardware frames using AVFilterContext.hw_device_ctx.
Definition: avfilter.h:188
AVAMFDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_amf.h:33
fn
#define fn(a)
Definition: aap_template.c:37
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:221
amf_uninit
static av_cold void amf_uninit(AVFilterContext *avctx)
Definition: vsrc_amf.c:118
uninit
static void uninit(AVBSFContext *ctx)
Definition: pcm_rechunk.c:68
av_hwdevice_ctx_create_derived
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
Definition: hwcontext.c:718
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
hw_device_ctx
static AVBufferRef * hw_device_ctx
Definition: hw_decode.c:45
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:45
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:615
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:724
amf_init
static av_cold int amf_init(AVFilterContext *avctx)
Definition: vsrc_amf.c:135
hwcontext_amf_internal.h
AMFGrabContext::avclass
AVClass * avclass
Definition: vsrc_amf.c:41
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
avfilter.h
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:117
AVERROR_FILTER_NOT_FOUND
#define AVERROR_FILTER_NOT_FOUND
Filter not found.
Definition: error.h:60
AVFilterContext
An instance of a filter.
Definition: avfilter.h:274
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:190
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
FFFilter::p
AVFilter p
The public AVFilter.
Definition: filters.h:270
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Underlying C type is int.
Definition: opt.h:327
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
amf_release_surface
static void amf_release_surface(void *opaque, uint8_t *data)
Definition: vsrc_amf.c:108
ff_vsrc_amf_capture
const FFFilter ff_vsrc_amf_capture
Definition: vsrc_amf.c:388
amf_capture_options
static const AVOption amf_capture_options[]
Definition: vsrc_amf.c:60
avstring.h
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
FILTER_SINGLE_PIXFMT
#define FILTER_SINGLE_PIXFMT(pix_fmt_)
Definition: filters.h:253
w32dlfcn.h
AMFGrabContext::eof
amf_bool eof
Definition: vsrc_amf.c:51