videodsp_init.c
/*
 * Copyright (C) 2002-2012 Michael Niedermayer
 * Copyright (C) 2012 Ronald S. Bultje
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
#include "libavutil/attributes.h"
#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/cpu.h"
#include "libavutil/mem.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/videodsp.h"

#if HAVE_YASM
typedef void emu_edge_vfix_func(uint8_t *dst, x86_reg dst_stride,
                                const uint8_t *src, x86_reg src_stride,
                                x86_reg start_y, x86_reg end_y, x86_reg bh);
typedef void emu_edge_vvar_func(uint8_t *dst, x86_reg dst_stride,
                                const uint8_t *src, x86_reg src_stride,
                                x86_reg start_y, x86_reg end_y, x86_reg bh,
                                x86_reg w);
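
/*
 * The emu_edge_*fix kernels are width-specialized edge-extension routines
 * (one per block width, selected from the tables below), while the *var
 * kernels take the width as a runtime argument and cover anything wider.
 * All of them are implemented in external assembly, hence the HAVE_YASM guard.
 */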

extern emu_edge_vfix_func ff_emu_edge_vfix1_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix2_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix3_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix4_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix5_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix6_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix7_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix8_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix9_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix10_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix11_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix12_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix13_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix14_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix15_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix16_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix17_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix18_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix19_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix20_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix21_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix22_mmx;
#if ARCH_X86_32
static emu_edge_vfix_func *vfixtbl_mmx[22] = {
    &ff_emu_edge_vfix1_mmx,  &ff_emu_edge_vfix2_mmx,  &ff_emu_edge_vfix3_mmx,
    &ff_emu_edge_vfix4_mmx,  &ff_emu_edge_vfix5_mmx,  &ff_emu_edge_vfix6_mmx,
    &ff_emu_edge_vfix7_mmx,  &ff_emu_edge_vfix8_mmx,  &ff_emu_edge_vfix9_mmx,
    &ff_emu_edge_vfix10_mmx, &ff_emu_edge_vfix11_mmx, &ff_emu_edge_vfix12_mmx,
    &ff_emu_edge_vfix13_mmx, &ff_emu_edge_vfix14_mmx, &ff_emu_edge_vfix15_mmx,
    &ff_emu_edge_vfix16_mmx, &ff_emu_edge_vfix17_mmx, &ff_emu_edge_vfix18_mmx,
    &ff_emu_edge_vfix19_mmx, &ff_emu_edge_vfix20_mmx, &ff_emu_edge_vfix21_mmx,
    &ff_emu_edge_vfix22_mmx
};
#endif
extern emu_edge_vvar_func ff_emu_edge_vvar_mmx;
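
/*
 * For block widths up to 15 bytes the SSE table below keeps the MMX kernels
 * (16-byte XMM loads only start to pay off at wider blocks); only the
 * 16..22-byte entries use the SSE versions.
 */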
extern emu_edge_vfix_func ff_emu_edge_vfix16_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix17_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix18_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix19_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix20_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix21_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix22_sse;
static emu_edge_vfix_func *vfixtbl_sse[22] = {
    ff_emu_edge_vfix1_mmx,  ff_emu_edge_vfix2_mmx,  ff_emu_edge_vfix3_mmx,
    ff_emu_edge_vfix4_mmx,  ff_emu_edge_vfix5_mmx,  ff_emu_edge_vfix6_mmx,
    ff_emu_edge_vfix7_mmx,  ff_emu_edge_vfix8_mmx,  ff_emu_edge_vfix9_mmx,
    ff_emu_edge_vfix10_mmx, ff_emu_edge_vfix11_mmx, ff_emu_edge_vfix12_mmx,
    ff_emu_edge_vfix13_mmx, ff_emu_edge_vfix14_mmx, ff_emu_edge_vfix15_mmx,
    ff_emu_edge_vfix16_sse, ff_emu_edge_vfix17_sse, ff_emu_edge_vfix18_sse,
    ff_emu_edge_vfix19_sse, ff_emu_edge_vfix20_sse, ff_emu_edge_vfix21_sse,
    ff_emu_edge_vfix22_sse
};
extern emu_edge_vvar_func ff_emu_edge_vvar_sse;

typedef void emu_edge_hfix_func(uint8_t *dst, x86_reg dst_stride,
                                x86_reg start_x, x86_reg bh);
typedef void emu_edge_hvar_func(uint8_t *dst, x86_reg dst_stride,
                                x86_reg start_x, x86_reg n_words, x86_reg bh);
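
/*
 * Horizontal edge extension: the hfix kernels are specialized for even widths
 * of 2..22 bytes (hence the 11-entry tables below), while the hvar kernels
 * take the number of 2-byte words to fill as a runtime argument.
 */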

extern emu_edge_hfix_func ff_emu_edge_hfix2_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix4_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix6_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix8_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix10_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix12_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix14_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix16_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix18_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix20_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix22_mmx;
#if ARCH_X86_32
static emu_edge_hfix_func *hfixtbl_mmx[11] = {
    ff_emu_edge_hfix2_mmx,  ff_emu_edge_hfix4_mmx,  ff_emu_edge_hfix6_mmx,
    ff_emu_edge_hfix8_mmx,  ff_emu_edge_hfix10_mmx, ff_emu_edge_hfix12_mmx,
    ff_emu_edge_hfix14_mmx, ff_emu_edge_hfix16_mmx, ff_emu_edge_hfix18_mmx,
    ff_emu_edge_hfix20_mmx, ff_emu_edge_hfix22_mmx
};
#endif
extern emu_edge_hvar_func ff_emu_edge_hvar_mmx;
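
/*
 * As with the vertical tables, widths below 16 bytes keep the MMX kernels in
 * the SSE2 table; only the 16..22-byte entries use the SSE2 versions.
 */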
extern emu_edge_hfix_func ff_emu_edge_hfix16_sse2;
extern emu_edge_hfix_func ff_emu_edge_hfix18_sse2;
extern emu_edge_hfix_func ff_emu_edge_hfix20_sse2;
extern emu_edge_hfix_func ff_emu_edge_hfix22_sse2;
static emu_edge_hfix_func *hfixtbl_sse2[11] = {
    ff_emu_edge_hfix2_mmx,   ff_emu_edge_hfix4_mmx,   ff_emu_edge_hfix6_mmx,
    ff_emu_edge_hfix8_mmx,   ff_emu_edge_hfix10_mmx,  ff_emu_edge_hfix12_mmx,
    ff_emu_edge_hfix14_mmx,  ff_emu_edge_hfix16_sse2, ff_emu_edge_hfix18_sse2,
    ff_emu_edge_hfix20_sse2, ff_emu_edge_hfix22_sse2
};
extern emu_edge_hvar_func ff_emu_edge_hvar_sse2;

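/*
 * Core edge-extension helper: copies the part of the block that overlaps the
 * w x h source frame and replicates the border pixels into the rest of the
 * block_w x block_h destination. Widths up to 22 bytes dispatch to the
 * width-specialized kernels via vfix_tbl/hfix_tbl; anything wider uses the
 * variable-width kernels.
 */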
static av_always_inline void emulated_edge_mc(uint8_t *dst, const uint8_t *src,
                                              ptrdiff_t dst_stride,
                                              ptrdiff_t src_stride,
                                              x86_reg block_w, x86_reg block_h,
                                              x86_reg src_x, x86_reg src_y,
                                              x86_reg w, x86_reg h,
                                              emu_edge_vfix_func **vfix_tbl,
                                              emu_edge_vvar_func *v_extend_var,
                                              emu_edge_hfix_func **hfix_tbl,
                                              emu_edge_hvar_func *h_extend_var)
{
    x86_reg start_y, start_x, end_y, end_x, src_y_add = 0, p;

    if (!w || !h)
        return;

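    /*
     * If the requested block lies completely outside the source frame in one
     * direction, move the source pointer to the nearest valid row/column so
     * that only edge replication remains to be done.
     */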
    if (src_y >= h) {
        src -= src_y*src_stride;
        src_y_add = h - 1;
        src_y     = h - 1;
    } else if (src_y <= -block_h) {
        src -= src_y*src_stride;
        src_y_add = 1 - block_h;
        src_y     = 1 - block_h;
    }
    if (src_x >= w) {
        src  += w - 1 - src_x;
        src_x = w - 1;
    } else if (src_x <= -block_w) {
        src  += 1 - block_w - src_x;
        src_x = 1 - block_w;
    }

    start_y = FFMAX(0, -src_y);
    start_x = FFMAX(0, -src_x);
    end_y   = FFMIN(block_h, h-src_y);
    end_x   = FFMIN(block_w, w-src_x);
    av_assert2(start_x < end_x && block_w > 0);
    av_assert2(start_y < end_y && block_h > 0);

    // fill in the to-be-copied part plus all above/below
    src += (src_y_add + start_y) * src_stride + start_x;
    w = end_x - start_x;
    if (w <= 22) {
        vfix_tbl[w - 1](dst + start_x, dst_stride, src, src_stride,
                        start_y, end_y, block_h);
    } else {
        v_extend_var(dst + start_x, dst_stride, src, src_stride,
                     start_y, end_y, block_h, w);
    }

    // fill left
    if (start_x) {
        if (start_x <= 22) {
            hfix_tbl[(start_x - 1) >> 1](dst, dst_stride, start_x, block_h);
        } else {
            h_extend_var(dst, dst_stride,
                         start_x, (start_x + 1) >> 1, block_h);
        }
    }

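    /*
     * The horizontal fill kernels work in 2-byte steps, so the right-edge
     * width is effectively rounded up to an even number of bytes; the
     * (p & 1) adjustments shift the destination and start offset by one byte
     * when p is odd so the even-width kernels can be reused.
     */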
    // fill right
    p = block_w - end_x;
    if (p) {
        if (p <= 22) {
            hfix_tbl[(p - 1) >> 1](dst + end_x - (p & 1), dst_stride,
                                   -!(p & 1), block_h);
        } else {
            h_extend_var(dst + end_x - (p & 1), dst_stride,
                         -!(p & 1), (p + 1) >> 1, block_h);
        }
    }
}
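
/*
 * Illustrative usage sketch (not part of the original file): decoders call
 * the function pointer that ff_videodsp_init_x86() installs below roughly as
 *
 *     VideoDSPContext vdsp;
 *     ff_videodsp_init(&vdsp, 8);                      // 8 bits per component
 *     vdsp.emulated_edge_mc(edge_buf, src, edge_buf_stride, src_stride,
 *                           block_w, block_h, src_x, src_y,
 *                           frame_width, frame_height);
 *
 * where edge_buf and the geometry variables are hypothetical caller-side
 * names; the wrappers below adapt this signature to emulated_edge_mc().
 */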

#if ARCH_X86_32
static av_noinline void emulated_edge_mc_mmx(uint8_t *buf, const uint8_t *src,
                                             ptrdiff_t buf_stride,
                                             ptrdiff_t src_stride,
                                             int block_w, int block_h,
                                             int src_x, int src_y, int w, int h)
{
    emulated_edge_mc(buf, src, buf_stride, src_stride, block_w, block_h,
                     src_x, src_y, w, h, vfixtbl_mmx, &ff_emu_edge_vvar_mmx,
                     hfixtbl_mmx, &ff_emu_edge_hvar_mmx);
}

static av_noinline void emulated_edge_mc_sse(uint8_t *buf, const uint8_t *src,
                                             ptrdiff_t buf_stride,
                                             ptrdiff_t src_stride,
                                             int block_w, int block_h,
                                             int src_x, int src_y, int w, int h)
{
    emulated_edge_mc(buf, src, buf_stride, src_stride, block_w, block_h,
                     src_x, src_y, w, h, vfixtbl_sse, &ff_emu_edge_vvar_sse,
                     hfixtbl_mmx, &ff_emu_edge_hvar_mmx);
}
#endif

static av_noinline void emulated_edge_mc_sse2(uint8_t *buf, const uint8_t *src,
                                              ptrdiff_t buf_stride,
                                              ptrdiff_t src_stride,
                                              int block_w, int block_h,
                                              int src_x, int src_y, int w,
                                              int h)
{
    emulated_edge_mc(buf, src, buf_stride, src_stride, block_w, block_h,
                     src_x, src_y, w, h, vfixtbl_sse, &ff_emu_edge_vvar_sse,
                     hfixtbl_sse2, &ff_emu_edge_hvar_sse2);
}
#endif /* HAVE_YASM */

/* x86 prefetch helpers, implemented in assembly; installed as ctx->prefetch below */
void ff_prefetch_mmxext(uint8_t *buf, ptrdiff_t stride, int h);
void ff_prefetch_3dnow(uint8_t *buf, ptrdiff_t stride, int h);

av_cold void ff_videodsp_init_x86(VideoDSPContext *ctx, int bpc)
{
#if HAVE_YASM
    int cpu_flags = av_get_cpu_flags();

#if ARCH_X86_32
    if (EXTERNAL_MMX(cpu_flags) && bpc <= 8) {
        ctx->emulated_edge_mc = emulated_edge_mc_mmx;
    }
    if (EXTERNAL_AMD3DNOW(cpu_flags)) {
        ctx->prefetch = ff_prefetch_3dnow;
    }
#endif /* ARCH_X86_32 */
    if (EXTERNAL_MMXEXT(cpu_flags)) {
        ctx->prefetch = ff_prefetch_mmxext;
    }
#if ARCH_X86_32
    if (EXTERNAL_SSE(cpu_flags) && bpc <= 8) {
        ctx->emulated_edge_mc = emulated_edge_mc_sse;
    }
#endif /* ARCH_X86_32 */
    if (EXTERNAL_SSE2(cpu_flags) && bpc <= 8) {
        ctx->emulated_edge_mc = emulated_edge_mc_sse2;
    }
#endif /* HAVE_YASM */
}