FFmpeg
vsrc_ddagrab.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0A00
22 #undef _WIN32_WINNT
23 #define _WIN32_WINNT 0x0A00
24 #endif
25 #define WIN32_LEAN_AND_MEAN
26 
27 #include <windows.h>
28 
29 #define COBJMACROS
30 
31 #include <initguid.h>
32 #include <d3d11.h>
33 #include <dxgi1_2.h>
34 #if HAVE_IDXGIOUTPUT5
35 #include <dxgi1_5.h>
36 #endif
37 
38 #include "libavutil/opt.h"
39 #include "libavutil/time.h"
40 #include "libavutil/avstring.h"
41 #include "libavutil/avassert.h"
42 #include "libavutil/hwcontext.h"
43 #include "libavutil/hwcontext_d3d11va.h"
44 #include "compat/w32dlfcn.h"
45 #include "avfilter.h"
46 #include "internal.h"
47 #include "formats.h"
48 #include "video.h"
49 
50 #include "vsrc_ddagrab_shaders.h"
51 
52 // avutil/time.h takes and returns time in microseconds
53 #define TIMER_RES 1000000
54 #define TIMER_RES64 INT64_C(1000000)
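// (One TIMER_RES64 unit therefore corresponds to one second of the
// av_gettime_relative() clock used for frame pacing below.)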
55 
56 typedef struct DdagrabContext {
57  const AVClass *class;
58 
59  AVBufferRef *device_ref;
60  AVHWDeviceContext *device_ctx;
61  AVD3D11VADeviceContext *device_hwctx;
62 
63  AVBufferRef *frames_ref;
64  AVHWFramesContext *frames_ctx;
65  AVD3D11VAFramesContext *frames_hwctx;
66 
67  DXGI_OUTPUT_DESC output_desc;
68  IDXGIOutputDuplication *dxgi_outdupl;
69  AVFrame *last_frame;
70 
71  int mouse_x, mouse_y;
72  ID3D11Texture2D *mouse_texture;
73  ID3D11ShaderResourceView *mouse_resource_view;
74 
75  AVRational time_base;
76  int64_t time_frame;
77  int64_t time_timeout;
78  int64_t first_pts;
79 
80  DXGI_FORMAT raw_format;
81  int raw_width;
82  int raw_height;
83 
84  ID3D11Texture2D *probed_texture;
85 
86  ID3D11VertexShader *vertex_shader;
87  ID3D11InputLayout *input_layout;
88  ID3D11PixelShader *pixel_shader;
89  ID3D11Buffer *const_buffer;
90  ID3D11SamplerState *sampler_state;
91  ID3D11BlendState *blend_state;
92 
93  int output_idx;
94  int draw_mouse;
95  AVRational framerate;
96  int width;
97  int height;
98  int offset_x;
99  int offset_y;
100  int out_fmt;
101  int allow_fallback;
102  int force_fmt;
103 } DdagrabContext;
104 
105 #define OFFSET(x) offsetof(DdagrabContext, x)
106 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
107 static const AVOption ddagrab_options[] = {
108  { "output_idx", "dda output index to capture", OFFSET(output_idx), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, FLAGS },
109  { "draw_mouse", "draw the mouse pointer", OFFSET(draw_mouse), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
110  { "framerate", "set video frame rate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, { .str = "30" }, 0, INT_MAX, FLAGS },
111  { "video_size", "set video frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, { .str = NULL }, 0, 0, FLAGS },
112  { "offset_x", "capture area x offset", OFFSET(offset_x), AV_OPT_TYPE_INT, { .i64 = 0 }, INT_MIN, INT_MAX, FLAGS },
113  { "offset_y", "capture area y offset", OFFSET(offset_y), AV_OPT_TYPE_INT, { .i64 = 0 }, INT_MIN, INT_MAX, FLAGS },
114  { "output_fmt", "desired output format", OFFSET(out_fmt), AV_OPT_TYPE_INT, { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM }, 0, INT_MAX, FLAGS, "output_fmt" },
115  { "auto", "let dda pick its preferred format", 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, 0, INT_MAX, FLAGS, "output_fmt" },
116  { "8bit", "only output default 8 Bit format", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM }, 0, INT_MAX, FLAGS, "output_fmt" },
117  { "bgra", "only output 8 Bit BGRA", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM }, 0, INT_MAX, FLAGS, "output_fmt" },
118  { "10bit", "only output default 10 Bit format", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R10G10B10A2_UNORM }, 0, INT_MAX, FLAGS, "output_fmt" },
119  { "x2bgr10", "only output 10 Bit X2BGR10", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R10G10B10A2_UNORM }, 0, INT_MAX, FLAGS, "output_fmt" },
120  { "16bit", "only output default 16 Bit format", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R16G16B16A16_FLOAT },0, INT_MAX, FLAGS, "output_fmt" },
121  { "rgbaf16", "only output 16 Bit RGBAF16", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R16G16B16A16_FLOAT },0, INT_MAX, FLAGS, "output_fmt" },
122  { "allow_fallback", "don't error on fallback to default 8 Bit format",
123  OFFSET(allow_fallback), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
124  { "force_fmt", "exclude BGRA from format list (experimental, discouraged by Microsoft)",
125  OFFSET(force_fmt), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
126  { NULL }
127 };
128 
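/*
 * Example usage (illustrative only, not taken from this file): grab the first
 * output at 60 fps and download the D3D11 frames for a software encoder:
 *   ffmpeg -f lavfi -i ddagrab=output_idx=0:framerate=60,hwdownload,format=bgra -c:v libx264 out.mp4
 * Keeping the frames on the GPU and feeding a D3D11-capable hardware encoder
 * avoids the hwdownload step.
 */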
129 AVFILTER_DEFINE_CLASS(ddagrab);
130 
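// Generic release helper: takes the address of any COM interface pointer,
// releases the interface if it is set and NULLs the pointer, so it is safe
// to call on already-released resources.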
131 static inline void release_resource(void *resource)
132 {
133  IUnknown **resp = (IUnknown**)resource;
134  if (*resp) {
135  IUnknown_Release(*resp);
136  *resp = NULL;
137  }
138 }
139 
140 static av_cold void ddagrab_uninit(AVFilterContext *avctx)
141 {
142  DdagrabContext *dda = avctx->priv;
143 
144  release_resource(&dda->blend_state);
145  release_resource(&dda->sampler_state);
146  release_resource(&dda->const_buffer);
147  release_resource(&dda->pixel_shader);
148  release_resource(&dda->input_layout);
149  release_resource(&dda->vertex_shader);
150 
151  release_resource(&dda->probed_texture);
152 
153  release_resource(&dda->dxgi_outdupl);
154  release_resource(&dda->mouse_resource_view);
155  release_resource(&dda->mouse_texture);
156 
157  av_frame_free(&dda->last_frame);
158  av_buffer_unref(&dda->frames_ref);
159  av_buffer_unref(&dda->device_ref);
160 }
161 
162 static av_cold int init_dxgi_dda(AVFilterContext *avctx)
163 {
164  DdagrabContext *dda = avctx->priv;
165  IDXGIDevice *dxgi_device = NULL;
166  IDXGIAdapter *dxgi_adapter = NULL;
167  IDXGIOutput *dxgi_output = NULL;
168  IDXGIOutput1 *dxgi_output1 = NULL;
169 #if HAVE_IDXGIOUTPUT5 && HAVE_DPI_AWARENESS_CONTEXT
170  IDXGIOutput5 *dxgi_output5 = NULL;
171 
172  typedef DPI_AWARENESS_CONTEXT (*set_thread_dpi_t)(DPI_AWARENESS_CONTEXT);
173  set_thread_dpi_t set_thread_dpi;
174  HMODULE user32_module;
175 #endif
176  int w, h;
177  HRESULT hr;
178 
179  hr = ID3D11Device_QueryInterface(dda->device_hwctx->device, &IID_IDXGIDevice, (void**)&dxgi_device);
180  if (FAILED(hr)) {
181  av_log(avctx, AV_LOG_ERROR, "Failed querying IDXGIDevice\n");
182  return AVERROR_EXTERNAL;
183  }
184 
185  hr = IDXGIDevice_GetParent(dxgi_device, &IID_IDXGIAdapter, (void**)&dxgi_adapter);
186  IDXGIDevice_Release(dxgi_device);
187  dxgi_device = NULL;
188  if (FAILED(hr)) {
189  av_log(avctx, AV_LOG_ERROR, "Failed getting parent IDXGIAdapter\n");
190  return AVERROR_EXTERNAL;
191  }
192 
193  hr = IDXGIAdapter_EnumOutputs(dxgi_adapter, dda->output_idx, &dxgi_output);
194  IDXGIAdapter_Release(dxgi_adapter);
195  dxgi_adapter = NULL;
196  if (FAILED(hr)) {
197  av_log(avctx, AV_LOG_ERROR, "Failed to enumerate DXGI output %d\n", dda->output_idx);
198  return AVERROR_EXTERNAL;
199  }
200 
201  hr = IDXGIOutput_GetDesc(dxgi_output, &dda->output_desc);
202  if (FAILED(hr)) {
203  IDXGIOutput_Release(dxgi_output);
204  av_log(avctx, AV_LOG_ERROR, "Failed getting output description\n");
205  return AVERROR_EXTERNAL;
206  }
207 
208 #if HAVE_IDXGIOUTPUT5 && HAVE_DPI_AWARENESS_CONTEXT
209  user32_module = dlopen("user32.dll", 0);
210  if (!user32_module) {
211  av_log(avctx, AV_LOG_ERROR, "Failed loading user32.dll\n");
212  return AVERROR_EXTERNAL;
213  }
214 
215  set_thread_dpi = (set_thread_dpi_t)dlsym(user32_module, "SetThreadDpiAwarenessContext");
216 
217  if (set_thread_dpi)
218  hr = IDXGIOutput_QueryInterface(dxgi_output, &IID_IDXGIOutput5, (void**)&dxgi_output5);
219 
220  if (set_thread_dpi && SUCCEEDED(hr)) {
221  DPI_AWARENESS_CONTEXT prev_dpi_ctx;
222  DXGI_FORMAT formats[] = {
223  DXGI_FORMAT_R16G16B16A16_FLOAT,
224  DXGI_FORMAT_R10G10B10A2_UNORM,
225  DXGI_FORMAT_B8G8R8A8_UNORM
226  };
227  int nb_formats = FF_ARRAY_ELEMS(formats);
228 
229  if(dda->out_fmt == DXGI_FORMAT_B8G8R8A8_UNORM) {
230  formats[0] = DXGI_FORMAT_B8G8R8A8_UNORM;
231  nb_formats = 1;
232  } else if (dda->out_fmt) {
233  formats[0] = dda->out_fmt;
234  formats[1] = DXGI_FORMAT_B8G8R8A8_UNORM;
235  nb_formats = dda->force_fmt ? 1 : 2;
236  }
237 
238  IDXGIOutput_Release(dxgi_output);
239  dxgi_output = NULL;
240 
241  prev_dpi_ctx = set_thread_dpi(DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2);
242  if (!prev_dpi_ctx)
243  av_log(avctx, AV_LOG_WARNING, "Failed enabling DPI awareness for DDA\n");
244 
245  hr = IDXGIOutput5_DuplicateOutput1(dxgi_output5,
246  (IUnknown*)dda->device_hwctx->device,
247  0,
248  nb_formats,
249  formats,
250  &dda->dxgi_outdupl);
251  IDXGIOutput5_Release(dxgi_output5);
252  dxgi_output5 = NULL;
253 
254  if (prev_dpi_ctx)
255  set_thread_dpi(prev_dpi_ctx);
256 
257  dlclose(user32_module);
258  user32_module = NULL;
259  set_thread_dpi = NULL;
260 
261  av_log(avctx, AV_LOG_DEBUG, "Using IDXGIOutput5 interface\n");
262  } else {
263  dlclose(user32_module);
264  user32_module = NULL;
265  set_thread_dpi = NULL;
266 
267  av_log(avctx, AV_LOG_DEBUG, "Falling back to IDXGIOutput1\n");
268 #else
269  {
270 #endif
271  if (dda->out_fmt && dda->out_fmt != DXGI_FORMAT_B8G8R8A8_UNORM && (!dda->allow_fallback || dda->force_fmt)) {
272  av_log(avctx, AV_LOG_ERROR, "Only 8 bit output supported with legacy API\n");
273  return AVERROR(ENOTSUP);
274  }
275 
276  hr = IDXGIOutput_QueryInterface(dxgi_output, &IID_IDXGIOutput1, (void**)&dxgi_output1);
277  IDXGIOutput_Release(dxgi_output);
278  dxgi_output = NULL;
279  if (FAILED(hr)) {
280  av_log(avctx, AV_LOG_ERROR, "Failed querying IDXGIOutput1\n");
281  return AVERROR_EXTERNAL;
282  }
283 
284  hr = IDXGIOutput1_DuplicateOutput(dxgi_output1,
285  (IUnknown*)dda->device_hwctx->device,
286  &dda->dxgi_outdupl);
287  IDXGIOutput1_Release(dxgi_output1);
288  dxgi_output1 = NULL;
289  }
290 
291  if (hr == DXGI_ERROR_NOT_CURRENTLY_AVAILABLE) {
292  av_log(avctx, AV_LOG_ERROR, "Too many open duplication sessions\n");
293  return AVERROR(EBUSY);
294  } else if (hr == DXGI_ERROR_UNSUPPORTED) {
295  av_log(avctx, AV_LOG_ERROR, "Selected output not supported\n");
296  return AVERROR_EXTERNAL;
297  } else if (hr == E_INVALIDARG) {
298  av_log(avctx, AV_LOG_ERROR, "Invalid output duplication argument\n");
299  return AVERROR(EINVAL);
300  } else if (hr == E_ACCESSDENIED) {
301  av_log(avctx, AV_LOG_ERROR, "Desktop duplication access denied\n");
302  return AVERROR(EPERM);
303  } else if (FAILED(hr)) {
304  av_log(avctx, AV_LOG_ERROR, "Failed duplicating output\n");
305  return AVERROR_EXTERNAL;
306  }
307 
308  w = dda->output_desc.DesktopCoordinates.right - dda->output_desc.DesktopCoordinates.left;
309  h = dda->output_desc.DesktopCoordinates.bottom - dda->output_desc.DesktopCoordinates.top;
310  av_log(avctx, AV_LOG_VERBOSE, "Opened dxgi output %d with dimensions %dx%d\n", dda->output_idx, w, h);
311 
312  return 0;
313 }
314 
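// Constant buffer fed to the vertex shader (via VSSetConstantBuffers below);
// the trailing padding keeps the structure size a multiple of 16 bytes, as
// D3D11 requires for constant buffers.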
315 typedef struct ConstBufferData
316 {
317  float width;
318  float height;
319 
320  uint64_t padding;
321 } ConstBufferData;
322 
323 static const D3D11_INPUT_ELEMENT_DESC vertex_shader_input_layout[] =
324 {
325  { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
326  { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
327 };
328 
329 static av_cold int init_render_resources(AVFilterContext *avctx)
330 {
331  DdagrabContext *dda = avctx->priv;
332  ID3D11Device *dev = dda->device_hwctx->device;
333  D3D11_SAMPLER_DESC sampler_desc = { 0 };
334  D3D11_BLEND_DESC blend_desc = { 0 };
335  D3D11_BUFFER_DESC buffer_desc = { 0 };
336  D3D11_SUBRESOURCE_DATA buffer_data = { 0 };
337  ConstBufferData const_data = { 0 };
338  HRESULT hr;
339 
340  hr = ID3D11Device_CreateVertexShader(dev,
341  vertex_shader_bytes,
342  sizeof(vertex_shader_bytes),
343  NULL,
344  &dda->vertex_shader);
345  if (FAILED(hr)) {
346  av_log(avctx, AV_LOG_ERROR, "CreateVertexShader failed: %lx\n", hr);
347  return AVERROR_EXTERNAL;
348  }
349 
350  hr = ID3D11Device_CreateInputLayout(dev,
351  vertex_shader_input_layout,
352  FF_ARRAY_ELEMS(vertex_shader_input_layout),
353  vertex_shader_bytes,
354  sizeof(vertex_shader_bytes),
355  &dda->input_layout);
356  if (FAILED(hr)) {
357  av_log(avctx, AV_LOG_ERROR, "CreateInputLayout failed: %lx\n", hr);
358  return AVERROR_EXTERNAL;
359  }
360 
361  hr = ID3D11Device_CreatePixelShader(dev,
362  pixel_shader_bytes,
363  sizeof(pixel_shader_bytes),
364  NULL,
365  &dda->pixel_shader);
366  if (FAILED(hr)) {
367  av_log(avctx, AV_LOG_ERROR, "CreatePixelShader failed: %lx\n", hr);
368  return AVERROR_EXTERNAL;
369  }
370 
371  const_data = (ConstBufferData){ dda->width, dda->height };
372 
373  buffer_data.pSysMem = &const_data;
374  buffer_desc.ByteWidth = sizeof(const_data);
375  buffer_desc.Usage = D3D11_USAGE_IMMUTABLE;
376  buffer_desc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
377  hr = ID3D11Device_CreateBuffer(dev,
378  &buffer_desc,
379  &buffer_data,
380  &dda->const_buffer);
381  if (FAILED(hr)) {
382  av_log(avctx, AV_LOG_ERROR, "CreateBuffer const buffer failed: %lx\n", hr);
383  return AVERROR_EXTERNAL;
384  }
385 
386  sampler_desc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
387  sampler_desc.AddressU = D3D11_TEXTURE_ADDRESS_CLAMP;
388  sampler_desc.AddressV = D3D11_TEXTURE_ADDRESS_CLAMP;
389  sampler_desc.AddressW = D3D11_TEXTURE_ADDRESS_CLAMP;
390  sampler_desc.ComparisonFunc = D3D11_COMPARISON_NEVER;
391  hr = ID3D11Device_CreateSamplerState(dev,
392  &sampler_desc,
393  &dda->sampler_state);
394  if (FAILED(hr)) {
395  av_log(avctx, AV_LOG_ERROR, "CreateSamplerState failed: %lx\n", hr);
396  return AVERROR_EXTERNAL;
397  }
398 
399  blend_desc.AlphaToCoverageEnable = FALSE;
400  blend_desc.IndependentBlendEnable = FALSE;
401  blend_desc.RenderTarget[0].BlendEnable = TRUE;
402  blend_desc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
403  blend_desc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
404  blend_desc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
405  blend_desc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ONE;
406  blend_desc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
407  blend_desc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
408  blend_desc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
409  hr = ID3D11Device_CreateBlendState(dev,
410  &blend_desc,
411  &dda->blend_state);
412  if (FAILED(hr)) {
413  av_log(avctx, AV_LOG_ERROR, "CreateBlendState failed: %lx\n", hr);
414  return AVERROR_EXTERNAL;
415  }
416 
417  return 0;
418 }
419 
420 static av_cold int ddagrab_init(AVFilterContext *avctx)
421 {
422  DdagrabContext *dda = avctx->priv;
423 
424  dda->last_frame = av_frame_alloc();
425  if (!dda->last_frame)
426  return AVERROR(ENOMEM);
427 
428  dda->mouse_x = -1;
429  dda->mouse_y = -1;
430 
431  return 0;
432 }
433 
434 static int create_d3d11_pointer_tex(AVFilterContext *avctx,
435  uint8_t *buf,
436  DXGI_OUTDUPL_POINTER_SHAPE_INFO *shape_info,
437  ID3D11Texture2D **out_tex,
438  ID3D11ShaderResourceView **res_view)
439 {
440  DdagrabContext *dda = avctx->priv;
441  D3D11_TEXTURE2D_DESC desc = { 0 };
442  D3D11_SUBRESOURCE_DATA init_data = { 0 };
443  D3D11_SHADER_RESOURCE_VIEW_DESC resource_desc = { 0 };
444  HRESULT hr;
445 
446  desc.MipLevels = 1;
447  desc.ArraySize = 1;
448  desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
449  desc.SampleDesc.Count = 1;
450  desc.SampleDesc.Quality = 0;
451  desc.Usage = D3D11_USAGE_IMMUTABLE;
452  desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
453 
454  desc.Width = shape_info->Width;
455  desc.Height = shape_info->Height;
456 
457  init_data.pSysMem = buf;
458  init_data.SysMemPitch = shape_info->Pitch;
459 
460  resource_desc.Format = desc.Format;
461  resource_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
462  resource_desc.Texture2D.MostDetailedMip = 0;
463  resource_desc.Texture2D.MipLevels = 1;
464 
465  hr = ID3D11Device_CreateTexture2D(dda->device_hwctx->device,
466  &desc,
467  &init_data,
468  out_tex);
469  if (FAILED(hr)) {
470  av_log(avctx, AV_LOG_ERROR, "Failed creating pointer texture\n");
471  return AVERROR_EXTERNAL;
472  }
473 
474  hr = ID3D11Device_CreateShaderResourceView(dda->device_hwctx->device,
475  (ID3D11Resource*)dda->mouse_texture,
476  &resource_desc,
477  res_view);
478  if (FAILED(hr)) {
479  release_resource(out_tex);
480  av_log(avctx, AV_LOG_ERROR, "CreateShaderResourceView for mouse failed: %lx\n", hr);
481  return AVERROR_EXTERNAL;
482  }
483 
484  return 0;
485 }
486 
487 static uint8_t *convert_mono_buffer(uint8_t *input, int *_width, int *_height, int *_pitch)
488 {
489  int width = *_width, height = *_height, pitch = *_pitch;
490  int real_height = height / 2;
491  uint8_t *output = av_malloc(real_height * width * 4);
492  int y, x;
493 
494  if (!output)
495  return NULL;
496 
497  // This simulates drawing the cursor on a full black surface
498  // i.e. ignore the AND mask, turn XOR mask into all 4 color channels
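 // A monochrome DDA pointer shape stores a 1 bpp AND mask in the top half of
 // the buffer and a 1 bpp XOR mask in the bottom half, which is why only rows
 // starting at real_height are read here.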
499  for (y = 0; y < real_height; y++) {
500  for (x = 0; x < width; x++) {
501  int v = input[(real_height + y) * pitch + (x / 8)];
502  v = (v >> (7 - (x % 8))) & 1;
503  memset(&output[4 * ((y*width) + x)], v ? 0xFF : 0, 4);
504  }
505  }
506 
507  *_pitch = width * 4;
508  *_height = real_height;
509 
510  return output;
511 }
512 
513 static void fixup_color_mask(uint8_t *buf, int width, int height, int pitch)
514 {
515  int x, y;
516  // There is no good way to replicate XOR'ing parts of the texture with the screen.
517  // The best effort is to render the non-masked parts and make the rest transparent.
518  for (y = 0; y < height; y++) {
519  for (x = 0; x < width; x++) {
520  int pos = (y*pitch) + (4*x) + 3;
521  buf[pos] = buf[pos] ? 0 : 0xFF;
522  }
523  }
524 }
525 
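// Track pointer position/visibility from the frame info and, whenever DDA
// reports a new pointer shape, convert it to a BGRA texture plus shader
// resource view for later compositing.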
526 static int update_mouse_pointer(AVFilterContext *avctx, DXGI_OUTDUPL_FRAME_INFO *frame_info)
527 {
528  DdagrabContext *dda = avctx->priv;
529  HRESULT hr;
530  int ret;
531 
532  if (frame_info->LastMouseUpdateTime.QuadPart == 0)
533  return 0;
534 
535  if (frame_info->PointerPosition.Visible) {
536  dda->mouse_x = frame_info->PointerPosition.Position.x;
537  dda->mouse_y = frame_info->PointerPosition.Position.y;
538  } else {
539  dda->mouse_x = dda->mouse_y = -1;
540  }
541 
542  if (frame_info->PointerShapeBufferSize) {
543  UINT size = frame_info->PointerShapeBufferSize;
544  DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info;
545  uint8_t *buf = av_malloc(size);
546  if (!buf)
547  return AVERROR(ENOMEM);
548 
549  hr = IDXGIOutputDuplication_GetFramePointerShape(dda->dxgi_outdupl,
550  size,
551  buf,
552  &size,
553  &shape_info);
554  if (FAILED(hr)) {
555  av_free(buf);
556  av_log(avctx, AV_LOG_ERROR, "Failed getting pointer shape: %lx\n", hr);
557  return AVERROR_EXTERNAL;
558  }
559 
560  if (shape_info.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME) {
561  uint8_t *new_buf = convert_mono_buffer(buf, &shape_info.Width, &shape_info.Height, &shape_info.Pitch);
562  av_free(buf);
563  if (!new_buf)
564  return AVERROR(ENOMEM);
565  buf = new_buf;
566  } else if (shape_info.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR) {
567  fixup_color_mask(buf, shape_info.Width, shape_info.Height, shape_info.Pitch);
568  } else if (shape_info.Type != DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR) {
569  av_log(avctx, AV_LOG_WARNING, "Unsupported pointer shape type: %d\n", (int)shape_info.Type);
570  av_free(buf);
571  return 0;
572  }
573 
574  release_resource(&dda->mouse_resource_view);
575  release_resource(&dda->mouse_texture);
576 
577  ret = create_d3d11_pointer_tex(avctx, buf, &shape_info, &dda->mouse_texture, &dda->mouse_resource_view);
578  av_freep(&buf);
579  if (ret < 0)
580  return ret;
581 
582  av_log(avctx, AV_LOG_VERBOSE, "Updated pointer shape texture\n");
583  }
584 
585  return 0;
586 }
587 
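// Acquire the next desktop image from the duplication interface, waiting at
// most time_timeout milliseconds, and return it as an ID3D11Texture2D.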
588 static int next_frame_internal(AVFilterContext *avctx, ID3D11Texture2D **desktop_texture)
589 {
590  DXGI_OUTDUPL_FRAME_INFO frame_info;
591  DdagrabContext *dda = avctx->priv;
592  IDXGIResource *desktop_resource = NULL;
593  HRESULT hr;
594  int ret;
595 
596  hr = IDXGIOutputDuplication_AcquireNextFrame(
597  dda->dxgi_outdupl,
598  dda->time_timeout,
599  &frame_info,
600  &desktop_resource);
601  if (hr == DXGI_ERROR_WAIT_TIMEOUT) {
602  return AVERROR(EAGAIN);
603  } else if (FAILED(hr)) {
604  av_log(avctx, AV_LOG_ERROR, "AcquireNextFrame failed: %lx\n", hr);
605  return AVERROR_EXTERNAL;
606  }
607 
608  if (dda->draw_mouse) {
609  ret = update_mouse_pointer(avctx, &frame_info);
610  if (ret < 0)
611  return ret;
612  }
613 
614  hr = IDXGIResource_QueryInterface(desktop_resource, &IID_ID3D11Texture2D, (void**)desktop_texture);
615  IDXGIResource_Release(desktop_resource);
616  desktop_resource = NULL;
617  if (FAILED(hr)) {
618  av_log(avctx, AV_LOG_ERROR, "DXGIResource QueryInterface failed\n");
619  return AVERROR_EXTERNAL;
620  }
621 
622  return 0;
623 }
624 
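// Grab a single frame up front to learn the format and dimensions DDA will
// actually deliver; the texture is stashed in probed_texture and consumed by
// the first ddagrab_request_frame() call.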
625 static int probe_output_format(AVFilterContext *avctx)
626 {
627  DdagrabContext *dda = avctx->priv;
628  D3D11_TEXTURE2D_DESC desc;
629  int ret;
630 
631  av_assert1(!dda->probed_texture);
632 
633  do {
634  ret = next_frame_internal(avctx, &dda->probed_texture);
635  } while(ret == AVERROR(EAGAIN));
636  if (ret < 0)
637  return ret;
638 
639  ID3D11Texture2D_GetDesc(dda->probed_texture, &desc);
640 
641  dda->raw_format = desc.Format;
642  dda->raw_width = desc.Width;
643  dda->raw_height = desc.Height;
644 
645  if (dda->width <= 0)
646  dda->width = dda->raw_width;
647  if (dda->height <= 0)
648  dda->height = dda->raw_height;
649 
650  return 0;
651 }
652 
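// Map the probed DXGI format to the matching sw_format and set up the
// AV_PIX_FMT_D3D11 hardware frames context used on the output link.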
653 static av_cold int init_hwframes_ctx(AVFilterContext *avctx)
654 {
655  DdagrabContext *dda = avctx->priv;
656  int ret = 0;
657 
658  dda->frames_ref = av_hwframe_ctx_alloc(dda->device_ref);
659  if (!dda->frames_ref)
660  return AVERROR(ENOMEM);
661  dda->frames_ctx = (AVHWFramesContext*)dda->frames_ref->data;
662  dda->frames_hwctx = (AVD3D11VAFramesContext*)dda->frames_ctx->hwctx;
663 
664  dda->frames_ctx->format = AV_PIX_FMT_D3D11;
665  dda->frames_ctx->width = dda->width;
666  dda->frames_ctx->height = dda->height;
667 
668  switch (dda->raw_format) {
669  case DXGI_FORMAT_B8G8R8A8_UNORM:
670  av_log(avctx, AV_LOG_VERBOSE, "Probed 8 bit RGB frame format\n");
671  dda->frames_ctx->sw_format = AV_PIX_FMT_BGRA;
672  break;
673  case DXGI_FORMAT_R10G10B10A2_UNORM:
674  av_log(avctx, AV_LOG_VERBOSE, "Probed 10 bit RGB frame format\n");
675  dda->frames_ctx->sw_format = AV_PIX_FMT_X2BGR10;
676  break;
677  case DXGI_FORMAT_R16G16B16A16_FLOAT:
678  av_log(avctx, AV_LOG_VERBOSE, "Probed 16 bit float RGB frame format\n");
679  dda->frames_ctx->sw_format = AV_PIX_FMT_RGBAF16;
680  break;
681  default:
682  av_log(avctx, AV_LOG_ERROR, "Unexpected texture output format!\n");
683  return AVERROR_BUG;
684  }
685 
686  if (dda->draw_mouse)
687  dda->frames_hwctx->BindFlags |= D3D11_BIND_RENDER_TARGET;
688 
689  ret = av_hwframe_ctx_init(dda->frames_ref);
690  if (ret < 0) {
691  av_log(avctx, AV_LOG_ERROR, "Failed to initialise hardware frames context: %d.\n", ret);
692  goto fail;
693  }
694 
695  return 0;
696 fail:
697  av_buffer_unref(&dda->frames_ref);
698  return ret;
699 }
700 
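// Output configuration: reuse or create a D3D11VA device, open the DDA
// session, probe the desktop format/size, clamp the requested capture area,
// and create the hardware frames context.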
701 static int ddagrab_config_props(AVFilterLink *outlink)
702 {
703  AVFilterContext *avctx = outlink->src;
704  DdagrabContext *dda = avctx->priv;
705  int ret;
706 
707  if (avctx->hw_device_ctx) {
708  dda->device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
709 
710  if (dda->device_ctx->type != AV_HWDEVICE_TYPE_D3D11VA) {
711  av_log(avctx, AV_LOG_ERROR, "Non-D3D11VA input hw_device_ctx\n");
712  return AVERROR(EINVAL);
713  }
714 
715  dda->device_ref = av_buffer_ref(avctx->hw_device_ctx);
716  if (!dda->device_ref)
717  return AVERROR(ENOMEM);
718 
719  av_log(avctx, AV_LOG_VERBOSE, "Using provided hw_device_ctx\n");
720  } else {
721  ret = av_hwdevice_ctx_create(&dda->device_ref, AV_HWDEVICE_TYPE_D3D11VA, NULL, NULL, 0);
722  if (ret < 0) {
723  av_log(avctx, AV_LOG_ERROR, "Failed to create D3D11VA device.\n");
724  return ret;
725  }
726 
727  dda->device_ctx = (AVHWDeviceContext*)dda->device_ref->data;
728 
729  av_log(avctx, AV_LOG_VERBOSE, "Created internal hw_device_ctx\n");
730  }
731 
732  dda->device_hwctx = (AVD3D11VADeviceContext*)dda->device_ctx->hwctx;
733 
734  ret = init_dxgi_dda(avctx);
735  if (ret < 0)
736  return ret;
737 
738  ret = probe_output_format(avctx);
739  if (ret < 0)
740  return ret;
741 
742  if (dda->out_fmt && dda->raw_format != dda->out_fmt && (!dda->allow_fallback || dda->force_fmt)) {
743  av_log(avctx, AV_LOG_ERROR, "Requested output format unavailable.\n");
744  return AVERROR(ENOTSUP);
745  }
746 
747  dda->width -= FFMAX(dda->width - dda->raw_width + dda->offset_x, 0);
748  dda->height -= FFMAX(dda->height - dda->raw_height + dda->offset_y, 0);
749 
750  dda->time_base = av_inv_q(dda->framerate);
751  dda->time_frame = av_gettime_relative() / av_q2d(dda->time_base);
752  dda->time_timeout = av_rescale_q(1, dda->time_base, (AVRational) { 1, 1000 }) / 2;
753 
754  if (dda->draw_mouse) {
755  ret = init_render_resources(avctx);
756  if (ret < 0)
757  return ret;
758  }
759 
760  ret = init_hwframes_ctx(avctx);
761  if (ret < 0)
762  return ret;
763 
764  outlink->hw_frames_ctx = av_buffer_ref(dda->frames_ref);
765  if (!outlink->hw_frames_ctx)
766  return AVERROR(ENOMEM);
767 
768  outlink->w = dda->width;
769  outlink->h = dda->height;
770  outlink->time_base = (AVRational){1, TIMER_RES};
771  outlink->frame_rate = dda->framerate;
772 
773  return 0;
774 }
775 
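// Composite the cached pointer texture onto the captured frame: a textured
// quad is alpha-blended into a render target view of the frame's texture.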
776 static int draw_mouse_pointer(AVFilterContext *avctx, AVFrame *frame)
777 {
778  DdagrabContext *dda = avctx->priv;
779  ID3D11DeviceContext *devctx = dda->device_hwctx->device_context;
780  ID3D11Texture2D *frame_tex = (ID3D11Texture2D*)frame->data[0];
781  D3D11_RENDER_TARGET_VIEW_DESC target_desc = { 0 };
782  ID3D11RenderTargetView* target_view = NULL;
783  ID3D11Buffer *mouse_vertex_buffer = NULL;
784  D3D11_TEXTURE2D_DESC tex_desc;
785  int num_vertices = 0;
786  int x, y;
787  HRESULT hr;
788  int ret = 0;
789 
790  if (!dda->mouse_texture || dda->mouse_x < 0 || dda->mouse_y < 0)
791  return 0;
792 
793  ID3D11Texture2D_GetDesc(dda->mouse_texture, &tex_desc);
794 
795  x = dda->mouse_x - dda->offset_x;
796  y = dda->mouse_y - dda->offset_y;
797 
798  if (x >= dda->width || y >= dda->height ||
799  -x >= (int)tex_desc.Width || -y >= (int)tex_desc.Height)
800  return 0;
801 
802  target_desc.Format = dda->raw_format;
803  target_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
804  target_desc.Texture2D.MipSlice = 0;
805 
806  hr = ID3D11Device_CreateRenderTargetView(dda->device_hwctx->device,
807  (ID3D11Resource*)frame_tex,
808  &target_desc,
809  &target_view);
810  if (FAILED(hr)) {
811  av_log(avctx, AV_LOG_ERROR, "CreateRenderTargetView failed: %lx\n", hr);
812  ret = AVERROR_EXTERNAL;
813  goto end;
814  }
815 
816  ID3D11DeviceContext_ClearState(devctx);
817 
818  {
819  D3D11_VIEWPORT viewport = { 0 };
820  viewport.Width = dda->width;
821  viewport.Height = dda->height;
822  viewport.MinDepth = 0.0f;
823  viewport.MaxDepth = 1.0f;
824 
825  ID3D11DeviceContext_RSSetViewports(devctx, 1, &viewport);
826  }
827 
828  {
829  FLOAT vertices[] = {
830  // x, y, z, u, v
831  x , y + tex_desc.Height, 0.0f, 0.0f, 1.0f,
832  x , y , 0.0f, 0.0f, 0.0f,
833  x + tex_desc.Width, y + tex_desc.Height, 0.0f, 1.0f, 1.0f,
834  x + tex_desc.Width, y , 0.0f, 1.0f, 0.0f,
835  };
836  UINT stride = sizeof(FLOAT) * 5;
837  UINT offset = 0;
838 
839  D3D11_SUBRESOURCE_DATA init_data = { 0 };
840  D3D11_BUFFER_DESC buf_desc = { 0 };
841 
842  num_vertices = sizeof(vertices) / (sizeof(FLOAT) * 5);
843 
844  buf_desc.Usage = D3D11_USAGE_DEFAULT;
845  buf_desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
846  buf_desc.ByteWidth = sizeof(vertices);
847  init_data.pSysMem = vertices;
848 
849  hr = ID3D11Device_CreateBuffer(dda->device_hwctx->device,
850  &buf_desc,
851  &init_data,
852  &mouse_vertex_buffer);
853  if (FAILED(hr)) {
854  av_log(avctx, AV_LOG_ERROR, "CreateBuffer failed: %lx\n", hr);
855  ret = AVERROR_EXTERNAL;
856  goto end;
857  }
858 
859  ID3D11DeviceContext_IASetVertexBuffers(devctx, 0, 1, &mouse_vertex_buffer, &stride, &offset);
860  ID3D11DeviceContext_IASetInputLayout(devctx, dda->input_layout);
861  ID3D11DeviceContext_IASetPrimitiveTopology(devctx, D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
862  }
863 
864  ID3D11DeviceContext_VSSetShader(devctx, dda->vertex_shader, NULL, 0);
865  ID3D11DeviceContext_VSSetConstantBuffers(devctx, 0, 1, &dda->const_buffer);
866  ID3D11DeviceContext_PSSetSamplers(devctx, 0, 1, &dda->sampler_state);
867  ID3D11DeviceContext_PSSetShaderResources(devctx, 0, 1, &dda->mouse_resource_view);
868  ID3D11DeviceContext_PSSetShader(devctx, dda->pixel_shader, NULL, 0);
869 
870  ID3D11DeviceContext_OMSetBlendState(devctx, dda->blend_state, NULL, 0xFFFFFFFF);
871  ID3D11DeviceContext_OMSetRenderTargets(devctx, 1, &target_view, NULL);
872 
873  ID3D11DeviceContext_Draw(devctx, num_vertices, 0);
874 
875 end:
876  release_resource(&mouse_vertex_buffer);
877  release_resource(&target_view);
878 
879  return ret;
880 }
881 
882 static int ddagrab_request_frame(AVFilterLink *outlink)
883 {
884  AVFilterContext *avctx = outlink->src;
885  DdagrabContext *dda = avctx->priv;
886 
887  ID3D11Texture2D *cur_texture = NULL;
888  D3D11_TEXTURE2D_DESC desc = { 0 };
889  D3D11_BOX box = { 0 };
890 
891  int64_t time_frame = dda->time_frame;
892  int64_t now, delay;
893  AVFrame *frame = NULL;
894  HRESULT hr;
895  int ret;
896 
897  /* time_frame is in units of microseconds divided by the time_base.
898  * This means that adding a clean 1M to it is the equivalent of adding
899  * 1M*time_base microseconds to it, except it avoids all rounding error.
900  * The only time rounding error occurs is when multiplying to calculate
901  * the delay. So any rounding error there corrects itself over time.
902  */
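 // For example, at the default framerate of 30 (time_base = 1/30) each
 // TIMER_RES64 step below advances the target time by 1000000/30, i.e. about
 // 33333 microseconds of av_gettime_relative() time.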
903  time_frame += TIMER_RES64;
904  for (;;) {
905  now = av_gettime_relative();
906  delay = time_frame * av_q2d(dda->time_base) - now;
907  if (delay <= 0) {
908  if (delay < -TIMER_RES64 * av_q2d(dda->time_base)) {
909  time_frame += TIMER_RES64;
910  }
911  break;
912  }
913  av_usleep(delay);
914  }
915 
916  if (!dda->first_pts)
917  dda->first_pts = now;
918  now -= dda->first_pts;
919 
920  if (!dda->probed_texture) {
921  ret = next_frame_internal(avctx, &cur_texture);
922  } else {
923  cur_texture = dda->probed_texture;
924  dda->probed_texture = NULL;
925  ret = 0;
926  }
927 
928  if (ret == AVERROR(EAGAIN) && dda->last_frame->buf[0]) {
929  frame = av_frame_alloc();
930  if (!frame)
931  return AVERROR(ENOMEM);
932 
933  ret = av_frame_ref(frame, dda->last_frame);
934  if (ret < 0) {
935  av_frame_free(&frame);
936  return ret;
937  }
938 
939  av_log(avctx, AV_LOG_DEBUG, "Duplicated output frame\n");
940 
941  goto frame_done;
942  } else if (ret == AVERROR(EAGAIN)) {
943  av_log(avctx, AV_LOG_VERBOSE, "Initial DDA AcquireNextFrame timeout!\n");
944  return AVERROR(EAGAIN);
945  } else if (ret < 0) {
946  return ret;
947  }
948 
949  // AcquireNextFrame sometimes has bursts of delay.
950  // This increases accuracy of the timestamp, but might upset consumers due to more jittery framerate?
951  now = av_gettime_relative() - dda->first_pts;
952 
953  ID3D11Texture2D_GetDesc(cur_texture, &desc);
954  if (desc.Format != dda->raw_format ||
955  (int)desc.Width != dda->raw_width ||
956  (int)desc.Height != dda->raw_height) {
957  av_log(avctx, AV_LOG_ERROR, "Output parameters changed!");
958  ret = AVERROR_OUTPUT_CHANGED;
959  goto fail;
960  }
961 
962  frame = ff_get_video_buffer(outlink, dda->width, dda->height);
963  if (!frame) {
964  ret = AVERROR(ENOMEM);
965  goto fail;
966  }
967 
968  box.left = dda->offset_x;
969  box.top = dda->offset_y;
970  box.right = box.left + dda->width;
971  box.bottom = box.top + dda->height;
972  box.front = 0;
973  box.back = 1;
974 
975  ID3D11DeviceContext_CopySubresourceRegion(
976  dda->device_hwctx->device_context,
977  (ID3D11Resource*)frame->data[0], (UINT)(intptr_t)frame->data[1],
978  0, 0, 0,
979  (ID3D11Resource*)cur_texture, 0,
980  &box);
981 
982  release_resource(&cur_texture);
983 
984  hr = IDXGIOutputDuplication_ReleaseFrame(dda->dxgi_outdupl);
985  if (FAILED(hr)) {
986  av_log(avctx, AV_LOG_ERROR, "DDA ReleaseFrame failed!\n");
987  ret = AVERROR_EXTERNAL;
988  goto fail;
989  }
990 
991  if (dda->draw_mouse) {
992  ret = draw_mouse_pointer(avctx, frame);
993  if (ret < 0)
994  goto fail;
995  }
996 
997  frame->sample_aspect_ratio = (AVRational){1, 1};
998 
999  if (desc.Format == DXGI_FORMAT_B8G8R8A8_UNORM ||
1000  desc.Format == DXGI_FORMAT_R10G10B10A2_UNORM) {
1001  // According to MSDN, all integer formats contain sRGB image data
1002  frame->color_range = AVCOL_RANGE_JPEG;
1003  frame->color_primaries = AVCOL_PRI_BT709;
1004  frame->color_trc = AVCOL_TRC_IEC61966_2_1;
1005  frame->colorspace = AVCOL_SPC_RGB;
1006  } else if(desc.Format == DXGI_FORMAT_R16G16B16A16_FLOAT) {
1007  // According to MSDN, all floating point formats contain sRGB image data with linear 1.0 gamma.
1008  frame->color_range = AVCOL_RANGE_JPEG;
1009  frame->color_primaries = AVCOL_PRI_BT709;
1010  frame->color_trc = AVCOL_TRC_LINEAR;
1011  frame->colorspace = AVCOL_SPC_RGB;
1012  } else {
1013  ret = AVERROR_BUG;
1014  goto fail;
1015  }
1016 
1017  av_frame_unref(dda->last_frame);
1018  ret = av_frame_ref(dda->last_frame, frame);
1019  if (ret < 0)
1020  return ret;
1021 
1022 frame_done:
1023  frame->pts = now;
1024  dda->time_frame = time_frame;
1025 
1026  return ff_filter_frame(outlink, frame);
1027 
1028 fail:
1029  if (frame)
1030  av_frame_free(&frame);
1031 
1032  if (cur_texture)
1033  IDXGIOutputDuplication_ReleaseFrame(dda->dxgi_outdupl);
1034 
1035  release_resource(&cur_texture);
1036  return ret;
1037 }
1038 
1039 static const AVFilterPad ddagrab_outputs[] = {
1040  {
1041  .name = "default",
1042  .type = AVMEDIA_TYPE_VIDEO,
1043  .request_frame = ddagrab_request_frame,
1044  .config_props = ddagrab_config_props,
1045  },
1046 };
1047 
1048 const AVFilter ff_vsrc_ddagrab = {
1049  .name = "ddagrab",
1050  .description = NULL_IF_CONFIG_SMALL("Grab Windows Desktop images using Desktop Duplication API"),
1051  .priv_size = sizeof(DdagrabContext),
1052  .priv_class = &ddagrab_class,
1053  .init = ddagrab_init,
1054  .uninit = ddagrab_uninit,
1055  .inputs = NULL,
1056  FILTER_OUTPUTS(ddagrab_outputs),
1057  FILTER_SINGLE_PIXFMT(AV_PIX_FMT_D3D11),
1058  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1059 };